闭社主体 forked from https://github.com/tootsuite/mastodon
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

155 lines
5.1 KiB

  1. # frozen_string_literal: true
  2. class BatchedRemoveStatusService < BaseService
  3. include StreamEntryRenderer
  4. # Delete given statuses and reblogs of them
  5. # Dispatch PuSH updates of the deleted statuses, but only local ones
  6. # Dispatch Salmon deletes, unique per domain, of the deleted statuses, but only local ones
  7. # Remove statuses from home feeds
  8. # Push delete events to streaming API for home feeds and public feeds
  9. # @param [Status] statuses A preferably batched array of statuses
  10. def call(statuses)
  11. statuses = Status.where(id: statuses.map(&:id)).includes(:account, :stream_entry).flat_map { |status| [status] + status.reblogs.includes(:account, :stream_entry).to_a }
  12. @mentions = statuses.map { |s| [s.id, s.mentions.includes(:account).to_a] }.to_h
  13. @tags = statuses.map { |s| [s.id, s.tags.pluck(:name)] }.to_h
  14. @stream_entry_batches = []
  15. @salmon_batches = []
  16. @activity_json_batches = []
  17. @json_payloads = statuses.map { |s| [s.id, Oj.dump(event: :delete, payload: s.id)] }.to_h
  18. @activity_json = {}
  19. @activity_xml = {}
  20. # Ensure that rendered XML reflects destroyed state
  21. statuses.each(&:destroy)
  22. # Batch by source account
  23. statuses.group_by(&:account_id).each do |_, account_statuses|
  24. account = account_statuses.first.account
  25. unpush_from_home_timelines(account_statuses)
  26. if account.local?
  27. batch_stream_entries(account, account_statuses)
  28. batch_activity_json(account, account_statuses)
  29. end
  30. end
  31. # Cannot be batched
  32. statuses.each do |status|
  33. unpush_from_public_timelines(status)
  34. batch_salmon_slaps(status) if status.local?
  35. end
  36. Pubsubhubbub::RawDistributionWorker.push_bulk(@stream_entry_batches) { |batch| batch }
  37. NotificationWorker.push_bulk(@salmon_batches) { |batch| batch }
  38. ActivityPub::DeliveryWorker.push_bulk(@activity_json_batches) { |batch| batch }
  39. end
  40. private
  41. def batch_stream_entries(account, statuses)
  42. statuses.each do |status|
  43. @stream_entry_batches << [build_xml(status.stream_entry), account.id]
  44. end
  45. end
  46. def batch_activity_json(account, statuses)
  47. account.followers.inboxes.each do |inbox_url|
  48. statuses.each do |status|
  49. @activity_json_batches << [build_json(status), account.id, inbox_url]
  50. end
  51. end
  52. statuses.each do |status|
  53. other_recipients = (status.mentions + status.reblogs).map(&:account).reject(&:local?).select(&:activitypub?).uniq(&:id)
  54. other_recipients.each do |target_account|
  55. @activity_json_batches << [build_json(status), account.id, target_account.inbox_url]
  56. end
  57. end
  58. end
  59. def unpush_from_home_timelines(statuses)
  60. account = statuses.first.account
  61. recipients = account.followers.local.pluck(:id)
  62. recipients << account.id if account.local?
  63. recipients.each do |follower_id|
  64. unpush(follower_id, statuses)
  65. end
  66. end
  67. def unpush_from_public_timelines(status)
  68. payload = @json_payloads[status.id]
  69. redis.pipelined do
  70. redis.publish('timeline:public', payload)
  71. redis.publish('timeline:public:local', payload) if status.local?
  72. @tags[status.id].each do |hashtag|
  73. redis.publish("timeline:hashtag:#{hashtag}", payload)
  74. redis.publish("timeline:hashtag:#{hashtag}:local", payload) if status.local?
  75. end
  76. end
  77. end
  78. def batch_salmon_slaps(status)
  79. return if @mentions[status.id].empty?
  80. recipients = @mentions[status.id].map(&:account).reject(&:local?).select(&:ostatus?).uniq(&:domain).map(&:id)
  81. recipients.each do |recipient_id|
  82. @salmon_batches << [build_xml(status.stream_entry), status.account_id, recipient_id]
  83. end
  84. end
  85. def unpush(follower_id, statuses)
  86. key = FeedManager.instance.key(:home, follower_id)
  87. originals = statuses.reject(&:reblog?)
  88. reblogs = statuses.select(&:reblog?)
  89. # Quickly remove all originals
  90. redis.pipelined do
  91. originals.each do |status|
  92. redis.zremrangebyscore(key, status.id, status.id)
  93. redis.publish("timeline:#{follower_id}", @json_payloads[status.id])
  94. end
  95. end
  96. # For reblogs, re-add original status to feed, unless the reblog
  97. # was not in the feed in the first place
  98. reblogs.each do |status|
  99. redis.zadd(key, status.reblog_of_id, status.reblog_of_id) unless redis.zscore(key, status.reblog_of_id).nil?
  100. redis.publish("timeline:#{follower_id}", @json_payloads[status.id])
  101. end
  102. end
  103. def redis
  104. Redis.current
  105. end
  106. def build_json(status)
  107. return @activity_json[status.id] if @activity_json.key?(status.id)
  108. @activity_json[status.id] = sign_json(status, ActiveModelSerializers::SerializableResource.new(
  109. status,
  110. serializer: status.reblog? ? ActivityPub::UndoAnnounceSerializer : ActivityPub::DeleteSerializer,
  111. adapter: ActivityPub::Adapter
  112. ).as_json)
  113. end
  114. def build_xml(stream_entry)
  115. return @activity_xml[stream_entry.id] if @activity_xml.key?(stream_entry.id)
  116. @activity_xml[stream_entry.id] = stream_entry_to_xml(stream_entry)
  117. end
  118. def sign_json(status, json)
  119. Oj.dump(ActivityPub::LinkedDataSignature.new(json).sign!(status.account))
  120. end
  121. end