# frozen_string_literal: true
class Feed
  # Read model for an account's timeline, backed by a Redis sorted set of
  # status ids (score == id) that is hydrated from the database on read.
  #
  # type    - timeline kind (e.g. :home); used both for the Redis key and to
  #           pick the Status scope ("as_#{type}_timeline") when regenerating.
  # account - the Account whose feed this is.
  def initialize(type, account)
    @type    = type
    @account = account
  end

  # Fetch up to +limit+ statuses from the feed, newest first.
  #
  # limit    - maximum number of statuses to return.
  # max_id   - exclusive upper bound on status ids (blank = unbounded).
  # since_id - exclusive lower bound on status ids (blank = unbounded).
  #
  # Returns an Array of Status records in reverse-chronological order.
  def get(limit, max_id = nil, since_id = nil)
    max_id   = '+inf' if max_id.blank?
    since_id = '-inf' if since_id.blank?

    # "(" prefix makes the Redis range bounds exclusive.
    unhydrated = redis.zrevrangebyscore(key, "(#{max_id}", "(#{since_id}", limit: [0, limit], with_scores: true).map(&:last).map(&:to_i)

    # If we're after most recent items and none are there, we need to precompute the feed
    if unhydrated.empty? && max_id == '+inf' && since_id == '-inf'
      RegenerationWorker.perform_async(@account.id, @type)
      # Serve straight from the database while the worker rebuilds the Redis
      # feed in the background. public_send (not send) so only the public
      # timeline scopes are reachable via the dynamically-built name.
      Status.public_send("as_#{@type}_timeline", @account).cache_ids.paginate_by_max_id(limit, nil, nil)
    else
      # Hydrate ids into records, preserving Redis ordering; ids whose
      # statuses have since been deleted simply drop out via compact.
      status_map = Status.where(id: unhydrated).cache_ids.map { |s| [s.id, s] }.to_h
      unhydrated.map { |id| status_map[id] }.compact
    end
  end

  private

  # Redis key under which this account's feed of the given type is stored.
  def key
    FeedManager.instance.key(@type, @account.id)
  end

  def redis
    Redis.current
  end
end
|