Add digest re-check before removing followers in synchronization mechanism (#34273)
@@ -8,6 +8,6 @@ class ActivityPub::PrepareFollowersSynchronizationService < BaseService
 
     return if params['collectionId'] != @account.followers_url || non_matching_uri_hosts?(@account.uri, params['url']) || @account.local_followers_hash == params['digest']
 
-    ActivityPub::FollowersSynchronizationWorker.perform_async(@account.id, params['url'])
+    ActivityPub::FollowersSynchronizationWorker.perform_async(@account.id, params['url'], params['digest'])
   end
 end
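For context on the gate in the hunk above: local_followers_hash and the incoming digest param are order-independent set hashes over follower URIs, computed by XOR-ing the SHA-256 digest of each URI and hex-encoding the 32-byte result. That is exactly the arithmetic the synchronization service below undoes with pack('H*') and Xorcist.xor!. A minimal sketch of such a digest, assuming the xorcist gem; followers_digest is a hypothetical helper with example URIs, not the service's actual code:

require 'digest'
require 'xorcist'

# Order-independent set hash: XOR the SHA-256 digests of every URI,
# then hex-encode the 32-byte accumulator. Hypothetical helper for
# illustration only.
def followers_digest(uris)
  acc = "\x00".b * 32
  uris.each { |uri| Xorcist.xor!(acc, Digest::SHA256.digest(uri)) }
  acc.unpack1('H*')
end

# XOR is commutative and associative, so page order cannot matter:
followers_digest(%w[https://a.example/users/1 https://a.example/users/2]) ==
  followers_digest(%w[https://a.example/users/2 https://a.example/users/1]) # => true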
@@ -6,13 +6,15 @@ class ActivityPub::SynchronizeFollowersService < BaseService
 
   MAX_COLLECTION_PAGES = 10
 
-  def call(account, partial_collection_url)
+  def call(account, partial_collection_url, expected_digest = nil)
     @account = account
     @expected_followers_ids = []
+    @digest = [expected_digest].pack('H*') if expected_digest.present?
 
     return unless process_collection!(partial_collection_url)
 
-    remove_unexpected_local_followers!
+    # Only remove followers if the digests match, as it is a destructive operation
+    remove_unexpected_local_followers! if expected_digest.blank? || @digest == "\x00" * 32
   end
 
   private
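Why the all-zeros comparison in call works: @digest starts as the raw bytes of the expected digest (pack('H*') is hex-decode), and each fetched follower URI's SHA-256 digest is XOR-ed into it page by page. Since x XOR x = 0, the accumulator collapses to 32 zero bytes exactly when the fetched set hashes to the expected digest. A self-contained sketch of that round trip, with made-up URIs:

require 'digest'
require 'xorcist'

uris = %w[https://a.example/users/1 https://a.example/users/2]

# What the origin server would advertise: hex-encoded XOR of SHA-256 digests.
expected = "\x00".b * 32
uris.each { |uri| Xorcist.xor!(expected, Digest::SHA256.digest(uri)) }
expected_digest = expected.unpack1('H*')

# The re-check: seed the accumulator with the expected digest, then XOR
# every fetched URI's digest back in; a perfect match cancels to zero.
digest = [expected_digest].pack('H*')
uris.each { |uri| Xorcist.xor!(digest, Digest::SHA256.digest(uri)) }
digest == "\x00".b * 32 # => true

# Any divergence (extra, missing, or changed follower) leaves it nonzero:
Xorcist.xor!(digest, Digest::SHA256.digest('https://a.example/users/3'))
digest == "\x00".b * 32 # => false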
@@ -21,6 +23,8 @@ class ActivityPub::SynchronizeFollowersService < BaseService
     page_expected_followers = extract_local_followers(items)
     @expected_followers_ids.concat(page_expected_followers.pluck(:id))
 
+    items.each { |uri| Xorcist.xor!(@digest, Digest::SHA256.digest(uri)) } if @digest.present?
+
     handle_unexpected_outgoing_follows!(page_expected_followers)
   end
 
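Design note: because the digest is accumulated per page inside the processing loop, anything that makes the fetched items diverge from the set the origin server hashed (a digest that went stale between receipt and fetch, or a collection that changed while its pages were being fetched) leaves @digest nonzero, so the destructive remove_unexpected_local_followers! is skipped rather than run against a partial view of the collection, as the comment in call spells out.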