Merge branch 'master' into glitch-soc/merge-upstream

Thibaut Girka committed on 2018-05-29 13:52:26 +02:00
37 changed files with 553 additions and 93 deletions


@@ -41,7 +41,7 @@ module Remotable
         rescue HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError, Paperclip::Errors::NotIdentifiedByImageMagickError, Addressable::URI::InvalidURIError, Mastodon::HostValidationError, Mastodon::LengthValidationError => e
           Rails.logger.debug "Error fetching remote #{attachment_name}: #{e}"
           nil
-        rescue Paperclip::Error => e
+        rescue Paperclip::Error, Mastodon::DimensionsValidationError => e
           Rails.logger.debug "Error processing remote #{attachment_name}: #{e}"
           nil
         end

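The practical effect of the changed rescue clause: a remote image that fails Mastodon's dimension check during Paperclip processing is now treated like the other processing failures, logged at debug level and discarded instead of raising out of the generated setter. A minimal sketch of the behaviour, assuming a model that includes Remotable; Account, its avatar, and the URL are illustrative assumptions, not part of this diff:

    account = Account.new(username: 'example')
    # Before this change, an image exceeding the allowed dimensions raised
    # Mastodon::DimensionsValidationError out of the setter; now it is rescued
    # and only logged via Rails.logger.debug, leaving the attachment unset.
    account.avatar_remote_url = 'https://remote.example/huge-image.png'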

@@ -195,12 +195,45 @@ class Status < ApplicationRecord
       where(account: [account] + account.following).where(visibility: [:public, :unlisted, :private])
     end
 
-    def as_direct_timeline(account)
-      query = joins("LEFT OUTER JOIN mentions ON statuses.id = mentions.status_id AND mentions.account_id = #{account.id}")
-              .where("mentions.account_id = #{account.id} OR statuses.account_id = #{account.id}")
-              .where(visibility: [:direct])
-
-      apply_timeline_filters(query, account, false)
+    def as_direct_timeline(account, limit = 20, max_id = nil, since_id = nil, cache_ids = false)
+      # The direct timeline is a mix of direct messages from_me and to_me.
+      # Two queries are executed, each with its own pagination.
+      # A constant expression using arel_table is required so the partial index can be used.
+
+      # The _from_me part does not require any timeline filters.
+      query_from_me = where(account_id: account.id)
+                      .where(Status.arel_table[:visibility].eq(3))
+                      .limit(limit)
+                      .order('statuses.id DESC')
+
+      # The _to_me part requires the mute and block filters.
+      # FIXME: should we also check mutes.hide_notifications?
+      query_to_me = Status
+                    .joins(:mentions)
+                    .merge(Mention.where(account_id: account.id))
+                    .where(Status.arel_table[:visibility].eq(3))
+                    .limit(limit)
+                    .order('mentions.status_id DESC')
+                    .not_excluded_by_account(account)
+
+      if max_id.present?
+        query_from_me = query_from_me.where('statuses.id < ?', max_id)
+        query_to_me   = query_to_me.where('mentions.status_id < ?', max_id)
+      end
+
+      if since_id.present?
+        query_from_me = query_from_me.where('statuses.id > ?', since_id)
+        query_to_me   = query_to_me.where('mentions.status_id > ?', since_id)
+      end
+
+      if cache_ids
+        # Returns an array of cache_ids objects that carry only id and updated_at.
+        (query_from_me.cache_ids.to_a + query_to_me.cache_ids.to_a).uniq(&:id).sort_by(&:id).reverse.take(limit)
+      else
+        # Returns an ActiveRecord::Relation.
+        items = (query_from_me.select(:id).to_a + query_to_me.select(:id).to_a).uniq(&:id).sort_by(&:id).reverse.take(limit)
+        Status.where(id: items.map(&:id))
+      end
     end
 
     def as_public_timeline(account = nil, local_only = false)

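Because the two queries are paginated independently, the final step merges them in Ruby: deduplicate the union, sort by id descending, and cut back to limit so that max_id-based pagination keeps working across both sources. A minimal sketch of that merge with plain integer ids (illustrative values, no ActiveRecord involved):

    from_me = [120, 115, 103]   # ids of direct statuses the account posted
    to_me   = [120, 118, 101]   # ids of direct statuses mentioning the account
    limit   = 4

    page = (from_me + to_me).uniq.sort.reverse.take(limit)
    # => [120, 118, 115, 103]
    # the next page would be requested with max_id = 103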

@@ -21,6 +21,22 @@ class Tag < ApplicationRecord
     name
   end
 
+  def history
+    days = []
+
+    7.times do |i|
+      day = i.days.ago.beginning_of_day.to_i
+
+      days << {
+        day: day.to_s,
+        uses: Redis.current.get("activity:tags:#{id}:#{day}") || '0',
+        accounts: Redis.current.pfcount("activity:tags:#{id}:#{day}:accounts").to_s,
+      }
+    end
+
+    days
+  end
+
   class << self
     def search_for(term, limit = 5)
       pattern = sanitize_sql_like(term.strip) + '%'

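Tag#history reads the same per-day Redis keys that the new TrendingTags class (below) writes: a plain counter at activity:tags:<id>:<day> and a HyperLogLog of account ids at activity:tags:<id>:<day>:accounts. A sketch of the returned shape, with illustrative counts for a hypothetical tag on the day of this commit:

    Tag.find(1).history
    # => [
    #   { day: "1527552000", uses: "12", accounts: "7" },   # 2018-05-29, UTC midnight timestamp
    #   { day: "1527465600", uses: "0",  accounts: "0" },   # 2018-05-28, no recorded activity
    #   ...                                                 # seven entries in total, newest first
    # ]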

@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+class TrendingTags
+  KEY                  = 'trending_tags'
+  HALF_LIFE            = 1.day.to_i
+  MAX_ITEMS            = 500
+  EXPIRE_HISTORY_AFTER = 7.days.seconds
+
+  class << self
+    def record_use!(tag, account, at_time = Time.now.utc)
+      return if disallowed_hashtags.include?(tag.name) || account.silenced?
+
+      increment_vote!(tag.id, at_time)
+      increment_historical_use!(tag.id, at_time)
+      increment_unique_use!(tag.id, account.id, at_time)
+    end
+
+    def get(limit)
+      tag_ids = redis.zrevrange(KEY, 0, limit).map(&:to_i)
+      tags = Tag.where(id: tag_ids).to_a.map { |tag| [tag.id, tag] }.to_h
+      tag_ids.map { |tag_id| tags[tag_id] }.compact
+    end
+
+    private
+
+    def increment_vote!(tag_id, at_time)
+      redis.zincrby(KEY, (2**((at_time.to_i - epoch) / HALF_LIFE)).to_f, tag_id.to_s)
+      redis.zremrangebyrank(KEY, 0, -MAX_ITEMS) if rand < (2.to_f / MAX_ITEMS)
+    end
+
+    def increment_historical_use!(tag_id, at_time)
+      key = "activity:tags:#{tag_id}:#{at_time.beginning_of_day.to_i}"
+      redis.incrby(key, 1)
+      redis.expire(key, EXPIRE_HISTORY_AFTER)
+    end
+
+    def increment_unique_use!(tag_id, account_id, at_time)
+      key = "activity:tags:#{tag_id}:#{at_time.beginning_of_day.to_i}:accounts"
+      redis.pfadd(key, account_id)
+      redis.expire(key, EXPIRE_HISTORY_AFTER)
+    end
+
+    # The epoch needs to be 2.5 years in the future if the half-life is one day
+    # While dynamic, it will always be the same within one year
+    def epoch
+      @epoch ||= Date.new(Date.current.year + 2.5, 10, 1).to_datetime.to_i
+    end
+
+    def disallowed_hashtags
+      return @disallowed_hashtags if defined?(@disallowed_hashtags)
+
+      @disallowed_hashtags = Setting.disallowed_hashtags.nil? ? [] : Setting.disallowed_hashtags
+      @disallowed_hashtags = @disallowed_hashtags.split(' ') if @disallowed_hashtags.is_a? String
+      @disallowed_hashtags = @disallowed_hashtags.map(&:downcase)
+    end
+
+    def redis
+      Redis.current
+    end
+  end
+end
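The weight added by increment_vote! is 2**((at_time - epoch) / HALF_LIFE), a standard exponentially decaying score: with the epoch fixed in the future, every use contributes a tiny positive amount, and a use recorded one HALF_LIFE (one day) later contributes exactly twice as much as one recorded today, so older votes lose half their relative influence per day without the sorted set ever being rewritten. The epoch only has to be placed so that these weights stay representable as Floats, which is what the "2.5 years in the future" comment is about. A rough worked example; the epoch value here is illustrative, not the one the code computes:

    HALF_LIFE = 86_400                       # 1.day.to_i
    epoch     = Time.utc(2020, 10, 1).to_i   # roughly 2.4 years after this commit

    today    = Time.utc(2018, 5, 29).to_i
    tomorrow = today + HALF_LIFE

    w_today    = (2**((today - epoch) / HALF_LIFE)).to_f    # ~2e-258, tiny but non-zero
    w_tomorrow = (2**((tomorrow - epoch) / HALF_LIFE)).to_f

    w_tomorrow / w_today  # => 2.0, each extra day of recency doubles a use's contribution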