[feed manager] Gut the feed manager

Fosco Marotto 2021-01-12 00:17:19 -05:00
parent 22ec66d07f
commit 5e6211a4f4
3 changed files with 20 additions and 141 deletions


@ -1,6 +1,6 @@
# frozen_string_literal: true
class Api::V1::AccountByUsernameController < API::BaseController
class Api::V1::AccountByUsernameController < EmptyController
  before_action :set_account
  before_action :check_account_suspension
  before_action :check_account_local


@ -42,7 +42,7 @@ class EmptyController < ActionController::Base
    links << [prev_path, [%w(rel prev)]] if prev_path
    response.headers['Link'] = LinkHeader.new(links) unless links.empty?
  end

  def current_user
    nil
  end
@ -55,6 +55,10 @@ class EmptyController < ActionController::Base
    nil
  end

  def gone
    respond_with_error(410)
  end

  def cache_collection(raw, klass)
    return raw unless klass.respond_to?(:with_includes)
@ -83,6 +87,6 @@ class EmptyController < ActionController::Base
  def truthy_param?(key)
    ActiveModel::Type::Boolean.new.cast(params[key])
  end
end
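
For reference, a minimal sketch of what truthy_param? returns for common query-string values, using the same ActiveModel cast the method wraps (the example values are illustrative only):

require 'active_model'

cast = ActiveModel::Type::Boolean.new
cast.cast('true')  # => true
cast.cast('1')     # => true
cast.cast('false') # => false
cast.cast('0')     # => false
cast.cast(nil)     # => nil, so an absent param stays falsy
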


@ -31,111 +31,50 @@ class FeedManager
  end

  def push_to_home(account, status)
    return false unless add_to_feed(:home, account.id, status, account.user&.aggregates_reblogs?)
    trim(:home, account.id)
    PushUpdateWorker.perform_async(account.id, status.id, "timeline:#{account.id}") if push_update_required?("timeline:#{account.id}")
    true
    # return false unless add_to_feed(:home, account.id, status, account.user&.aggregates_reblogs?)
    # trim(:home, account.id)
    # PushUpdateWorker.perform_async(account.id, status.id, "timeline:#{account.id}") if push_update_required?("timeline:#{account.id}")
    true
  end

  def unpush_from_home(account, status)
    return false unless remove_from_feed(:home, account.id, status)
    redis.publish("timeline:#{account.id}", Oj.dump(event: :delete, payload: status.id.to_s))
    true
    # return false unless remove_from_feed(:home, account.id, status)
    # redis.publish("timeline:#{account.id}", Oj.dump(event: :delete, payload: status.id.to_s))
    # true
  end

  def unpush_from_list(list, status)
    return false unless remove_from_feed(:list, list.id, status)
    redis.publish("timeline:list:#{list.id}", Oj.dump(event: :delete, payload: status.id.to_s))
    true
    # return false unless remove_from_feed(:list, list.id, status)
    # redis.publish("timeline:list:#{list.id}", Oj.dump(event: :delete, payload: status.id.to_s))
    # true
  end
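
The push/unpush methods being stubbed out here wrote to per-account Redis sorted sets. The key helper itself is outside this diff; the sketch below is only an assumption about how it is shaped in Mastodon-derived code, included to make the key names used elsewhere in this file easier to follow:

# Hypothetical sketch of the key helper (not shown in this diff): one sorted
# set per timeline, plus suffixed keys for tracking reblogs.
def key(type, id, subtype = nil)
  return "feed:#{type}:#{id}" unless subtype
  "feed:#{type}:#{id}:#{subtype}"
end

key(:home, 1)                 # => "feed:home:1"            (the feed itself)
key(:home, 1, 'reblogs')      # => "feed:home:1:reblogs"
key(:home, 1, 'reblogs:500')  # => "feed:home:1:reblogs:500"
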
  def trim(type, account_id)
    timeline_key = key(type, account_id)
    reblog_key = key(type, account_id, 'reblogs')

    # Remove any items past the MAX_ITEMS'th entry in our feed
    redis.zremrangebyrank(timeline_key, '0', (-(FeedManager::MAX_ITEMS + 1)).to_s)

    # Get the score of the REBLOG_FALLOFF'th item in our feed, and stop
    # tracking anything after it for deduplication purposes.
    falloff_rank = FeedManager::REBLOG_FALLOFF - 1
    falloff_range = redis.zrevrange(timeline_key, falloff_rank, falloff_rank, with_scores: true)
    falloff_score = falloff_range&.first&.last&.to_i || 0

    # Get any reblogs we might have to clean up after.
    redis.zrangebyscore(reblog_key, 0, falloff_score).each do |reblogged_id|
      # Remove it from the set of reblogs we're tracking *first* to avoid races.
      redis.zrem(reblog_key, reblogged_id)
      # Just drop any set we might have created to track additional reblogs.
      # This means that if this reblog is deleted, we won't automatically insert
      # another reblog, but also that any new reblog can be inserted into the
      # feed.
      redis.del(key(type, account_id, "reblogs:#{reblogged_id}"))
    end
  end
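
As a worked example of the trimming step, assuming the Mastodon default of MAX_ITEMS = 400 (the constant is defined outside this hunk) and the feed:home:<id> key naming sketched above: a stop rank of -(MAX_ITEMS + 1) removes everything except the 400 highest-scored, i.e. newest, entries.

require 'redis'

redis = Redis.new  # stand-in for the connection the class uses
# a feed that has grown to 450 entries, with scores equal to status ids
redis.zcard('feed:home:1')                           # => 450
redis.zremrangebyrank('feed:home:1', 0, -(400 + 1))  # drops ranks 0..-401, the 50 oldest
redis.zcard('feed:home:1')                           # => 400
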
  def merge_into_timeline(from_account, into_account)
    timeline_key = key(:home, into_account.id)
    query = from_account.statuses.limit(FeedManager::MAX_ITEMS / 4)

    if redis.zcard(timeline_key) >= FeedManager::MAX_ITEMS / 4
      oldest_home_score = redis.zrange(timeline_key, 0, 0, with_scores: true)&.first&.last&.to_i || 0
      query = query.where('id > ?', oldest_home_score)
    end

    query.each do |status|
      next if status.limited_visibility? || filter?(:home, status, into_account)
      add_to_feed(:home, into_account.id, status, into_account.user&.aggregates_reblogs?)
    end

    trim(:home, into_account.id)
  end
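
Because every member's score is its status id, the lowest-ranked entry of the sorted set is the oldest status in the feed, which is how oldest_home_score is derived above. A minimal illustration (key name assumed, as before):

require 'redis'

redis = Redis.new
redis.zadd('feed:home:1', [[101, 101], [205, 205], [307, 307]])
redis.zrange('feed:home:1', 0, 0, with_scores: true)  # => [["101", 101.0]]
# only from_account statuses with id > 101 are worth merging in
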
  def unmerge_from_timeline(from_account, into_account)
    timeline_key = key(:home, into_account.id)
    oldest_home_score = redis.zrange(timeline_key, 0, 0, with_scores: true)&.first&.last&.to_i || 0

    from_account.statuses.select('id, reblog_of_id').where('id > ?', oldest_home_score).reorder(nil).find_each do |status|
      remove_from_feed(:home, into_account.id, status)
    end
  end
  def clear_from_timeline(account, target_account)
    timeline_key = key(:home, account.id)
    timeline_status_ids = redis.zrange(timeline_key, 0, -1)
    target_statuses = Status.where(id: timeline_status_ids, account: target_account)

    target_statuses.each do |status|
      unpush_from_home(account, status)
    end
  end
  def populate_feed(account)
    added = 0
    limit = FeedManager::MAX_ITEMS / 2
    max_id = nil

    loop do
      statuses = Status.as_home_timeline(account)
                       .paginate_by_max_id(limit, max_id)

      break if statuses.empty?

      statuses.each do |status|
        next if filter_from_home?(status, account.id)
        added += 1 if add_to_feed(:home, account.id, status, account.user&.aggregates_reblogs?)
      end

      break unless added.zero?

      max_id = statuses.last.id
    end
  end
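
populate_feed pages backwards through the home timeline in chunks of MAX_ITEMS / 2, skips pages whose statuses were all filtered, and stops after the first page that contributes anything. paginate_by_max_id is not shown in this diff; in Mastodon-derived code it is roughly the scope sketched below, which is an assumption here:

# Hypothetical sketch of the pagination scope used above: newest first,
# optionally bounded by the max id carried over from the previous page.
def self.paginate_by_max_id(limit, max_id = nil)
  query = order(id: :desc).limit(limit)
  query = query.where('id < ?', max_id) if max_id.present?
  query
end
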
  private

  def push_update_required?(timeline_id)
    redis.exists?("subscribed:#{timeline_id}")
  end

  def blocks_or_mutes?(receiver_id, account_ids, context)
@ -171,7 +110,7 @@ class FeedManager
      should_filter ||= Block.where(account_id: status.reblog.account_id, target_account_id: receiver_id).exists? # or if the author of the reblogged status is blocking me
      return should_filter
    end

    return false if status.group_id
    false
@ -258,45 +197,6 @@ class FeedManager
  # an internal helper: callers must call trim or push updates if
  # either action is appropriate.
  def add_to_feed(timeline_type, account_id, status, aggregate_reblogs = true)
    timeline_key = key(timeline_type, account_id)
    reblog_key = key(timeline_type, account_id, 'reblogs')

    if status.reblog? && (aggregate_reblogs.nil? || aggregate_reblogs)
      # If the original status or a reblog of it is within
      # REBLOG_FALLOFF statuses from the top, do not re-insert it into
      # the feed
      rank = redis.zrevrank(timeline_key, status.reblog_of_id)
      return false if !rank.nil? && rank < FeedManager::REBLOG_FALLOFF

      reblog_rank = redis.zrevrank(reblog_key, status.reblog_of_id)

      if reblog_rank.nil?
        # This is not something we've already seen reblogged, so we
        # can just add it to the feed (and note that we're
        # reblogging it).
        redis.zadd(timeline_key, status.id, status.id)
        redis.zadd(reblog_key, status.id, status.reblog_of_id)
      else
        # Another reblog of the same status was already in the
        # REBLOG_FALLOFF most recent statuses, so we note that this
        # is an "extra" reblog, by storing it in reblog_set_key.
        reblog_set_key = key(timeline_type, account_id, "reblogs:#{status.reblog_of_id}")
        redis.sadd(reblog_set_key, status.id)
        return false
      end
    else
      # A reblog may reach earlier than the original status because of the
      # delay of the worker delivering the original status, the late addition
      # by merging timelines, and other reasons.
      # If such a reblog already exists, just do not re-insert it into the feed.
      rank = redis.zrevrank(reblog_key, status.id)
      return false unless rank.nil?

      redis.zadd(timeline_key, status.id, status.id)
    end

    true
  end
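
To make the reblog aggregation concrete: the first reblog of a status is inserted into the feed as usual, while any later reblog of the same status arriving within REBLOG_FALLOFF is only remembered in a per-status set (key names assumed, as above):

require 'redis'

redis = Redis.new
redis.zadd('feed:home:1', 601, 601)          # first reblog (id 601) of status 500 enters the feed
redis.zadd('feed:home:1:reblogs', 601, 500)  # note that 500 is currently represented by a reblog
# a second reblog (id 602) arrives while 500 is still near the top of the feed:
redis.sadd('feed:home:1:reblogs:500', 602)   # tracked only as an "extra" reblog, not shown again
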
@ -305,32 +205,7 @@ class FeedManager
  # `add_to_feed`, this does not trigger push updates, so callers must
  # do so if appropriate.
  def remove_from_feed(timeline_type, account_id, status)
    timeline_key = key(timeline_type, account_id)

    if status.reblog?
      # 1. If the reblogging status is not in the feed, stop.
      status_rank = redis.zrevrank(timeline_key, status.id)
      return false if status_rank.nil?

      # 2. Remove reblog from set of this status's reblogs.
      reblog_set_key = key(timeline_type, account_id, "reblogs:#{status.reblog_of_id}")
      redis.srem(reblog_set_key, status.id)

      # 3. Re-insert another reblog or original into the feed if one
      # remains in the set. We could pick a random element, but this
      # set should generally be small, and it seems ideal to show the
      # oldest potential such reblog.
      other_reblog = redis.smembers(reblog_set_key).map(&:to_i).min
      redis.zadd(timeline_key, other_reblog, other_reblog) if other_reblog

      # 4. Remove the reblogging status from the feed (as normal)
      # (outside conditional)
    else
      # If the original is getting deleted, no use for reblog references
      redis.del(key(timeline_type, account_id, "reblogs:#{status.id}"))
    end

    redis.zrem(timeline_key, status.id)
  end
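
Continuing the example above: when the visible reblog is removed, the oldest tracked extra reblog is promoted back into the feed so the boosted status does not silently disappear (key names assumed):

require 'redis'

redis = Redis.new
redis.srem('feed:home:1:reblogs:500', 601)                         # no-op here; 601 was never an "extra"
other = redis.smembers('feed:home:1:reblogs:500').map(&:to_i).min  # => 602
redis.zadd('feed:home:1', other, other)                            # promote 602 into the feed
redis.zrem('feed:home:1', 601)                                     # drop the removed reblog
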
end