Merge branch 'main' into glitch-soc/merge-upstream

Conflicts:
- `.github/dependabot.yml`:
  Upstream made changes, but we have dropped this file.
  Kept the file deleted.
- `.prettierignore`:
  Upstream made changes at the end of the file, where we
  had our extra lines.
  Moved our extra lines back to the end.
- `app/serializers/initial_state_serializer.rb`:
  Upstream code style changes.
  Applied them.
- `app/services/backup_service.rb`:
  Upstream code style changes.
  Applied them.

Claire committed on 2023-02-19 10:42:55 +01:00
391 changed files with 6713 additions and 3145 deletions

@@ -27,14 +27,12 @@ class ActivityTracker
     (start_at.to_date...end_at.to_date).map do |date|
       key = key_at(date.to_time(:utc))
 
-      value = begin
-        case @type
-        when :basic
-          redis.get(key).to_i
-        when :unique
-          redis.pfcount(key)
-        end
-      end
+      value = case @type
+              when :basic
+                redis.get(key).to_i
+              when :unique
+                redis.pfcount(key)
+              end
 
       [date, value]
     end

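Note: this hunk, like most in this commit, looks like RuboCop's Style/RedundantBegin autocorrection. `case` is an expression in Ruby, so its result can be assigned directly; the `begin`/`end` wrapper added nothing. A minimal runnable sketch of the pattern, with made-up values rather than the Redis-backed code above:

    type = :basic

    # Before: a begin block exists only to capture the case result
    count = begin
      case type
      when :basic then 10
      when :unique then 20
      end
    end

    # After: assign the case expression directly
    count = case type
            when :basic then 10
            when :unique then 20
            end

    puts count # => 10
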
@@ -108,26 +108,24 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
   def process_status_params
     @status_parser = ActivityPub::Parser::StatusParser.new(@json, followers_collection: @account.followers_url)
 
-    @params = begin
-      {
-        uri: @status_parser.uri,
-        url: @status_parser.url || @status_parser.uri,
-        account: @account,
-        text: converted_object_type? ? converted_text : (@status_parser.text || ''),
-        language: @status_parser.language,
-        spoiler_text: converted_object_type? ? '' : (@status_parser.spoiler_text || ''),
-        created_at: @status_parser.created_at,
-        edited_at: @status_parser.edited_at && @status_parser.edited_at != @status_parser.created_at ? @status_parser.edited_at : nil,
-        override_timestamps: @options[:override_timestamps],
-        reply: @status_parser.reply,
-        sensitive: @account.sensitized? || @status_parser.sensitive || false,
-        visibility: @status_parser.visibility,
-        thread: replied_to_status,
-        conversation: conversation_from_uri(@object['conversation']),
-        media_attachment_ids: process_attachments.take(4).map(&:id),
-        poll: process_poll,
-      }
-    end
+    @params = {
+      uri: @status_parser.uri,
+      url: @status_parser.url || @status_parser.uri,
+      account: @account,
+      text: converted_object_type? ? converted_text : (@status_parser.text || ''),
+      language: @status_parser.language,
+      spoiler_text: converted_object_type? ? '' : (@status_parser.spoiler_text || ''),
+      created_at: @status_parser.created_at,
+      edited_at: @status_parser.edited_at && @status_parser.edited_at != @status_parser.created_at ? @status_parser.edited_at : nil,
+      override_timestamps: @options[:override_timestamps],
+      reply: @status_parser.reply,
+      sensitive: @account.sensitized? || @status_parser.sensitive || false,
+      visibility: @status_parser.visibility,
+      thread: replied_to_status,
+      conversation: conversation_from_uri(@object['conversation']),
+      media_attachment_ids: process_attachments.take(4).map(&:id),
+      poll: process_poll,
+    }
   end
 
   def process_audience
@@ -327,7 +325,7 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
   def resolve_thread(status)
     return unless status.reply? && status.thread.nil? && Request.valid_url?(in_reply_to_uri)
 
-    ThreadResolveWorker.perform_async(status.id, in_reply_to_uri, { 'request_id' => @options[:request_id]})
+    ThreadResolveWorker.perform_async(status.id, in_reply_to_uri, { 'request_id' => @options[:request_id] })
   end
 
   def fetch_replies(status)
@@ -338,7 +336,7 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
     return unless replies.nil?
 
     uri = value_or_id(collection)
-    ActivityPub::FetchRepliesWorker.perform_async(status.id, uri, { 'request_id' => @options[:request_id]}) unless uri.nil?
+    ActivityPub::FetchRepliesWorker.perform_async(status.id, uri, { 'request_id' => @options[:request_id] }) unless uri.nil?
   end
 
   def conversation_from_uri(uri)

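The two one-line changes above appear to be Layout/SpaceInsideHashLiteralBraces: the style used here pads both sides of a hash literal, so `{ 'request_id' => ... }` gains a space before the closing brace. A trivial illustration:

    opts = { 'request_id' => 42 }   # preferred: symmetric padding
    # opts = { 'request_id' => 42}  # flagged: no space before the closing brace
    puts opts['request_id'] # => 42
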
@@ -28,13 +28,11 @@ class ActivityPub::Forwarder
   end
 
   def signature_account_id
-    @signature_account_id ||= begin
-      if in_reply_to_local?
-        in_reply_to.account_id
-      else
-        reblogged_by_account_ids.first
-      end
-    end
+    @signature_account_id ||= if in_reply_to_local?
+                                in_reply_to.account_id
+                              else
+                                reblogged_by_account_ids.first
+                              end
   end
 
   def inboxes

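Same Style/RedundantBegin idea as before, applied to memoization: `if` is also an expression, so `||=` can bind to it directly. A runnable sketch with placeholder names and values (not Mastodon's actual logic):

    def local?
      true
    end

    def signature_account_id
      @signature_account_id ||= if local?
                                  1 # stand-in for in_reply_to.account_id
                                else
                                  2 # stand-in for reblogged_by_account_ids.first
                                end
    end

    puts signature_account_id # => 1
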
@@ -27,9 +27,7 @@ class ActivityPub::LinkedDataSignature
     document_hash = hash(@json.without('signature'))
     to_be_verified = options_hash + document_hash
 
-    if creator.keypair.public_key.verify(OpenSSL::Digest.new('SHA256'), Base64.decode64(signature), to_be_verified)
-      creator
-    end
+    creator if creator.keypair.public_key.verify(OpenSSL::Digest.new('SHA256'), Base64.decode64(signature), to_be_verified)
   end
 
   def sign!(creator, sign_with: nil)

@@ -50,9 +50,7 @@ class ActivityPub::Parser::MediaAttachmentParser
     components = begin
       blurhash = @json['blurhash']
 
-      if blurhash.present? && /^[\w#$%*+,-.:;=?@\[\]^{|}~]+$/.match?(blurhash)
-        Blurhash.components(blurhash)
-      end
+      Blurhash.components(blurhash) if blurhash.present? && /^[\w#$%*+,-.:;=?@\[\]^{|}~]+$/.match?(blurhash)
     end
 
     components.present? && components.none? { |comp| comp > 5 }

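Worth noting the contrast with the previous hunks: the outer `begin` survives here because it groups two statements, and only the single-statement `if` collapses into a modifier. A sketch of the surviving shape, with placeholder data:

    json = { 'blurhash' => 'LEHV6nWB2yk8' } # hypothetical input

    components = begin
      blurhash = json['blurhash']
      blurhash.length if blurhash && !blurhash.empty?
    end

    puts components # => 12
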
@@ -58,12 +58,10 @@ class Admin::Metrics::Dimension::SoftwareVersionsDimension < Admin::Metrics::Dimension::BaseDimension
   end
 
   def redis_info
-    @redis_info ||= begin
-      if redis.is_a?(Redis::Namespace)
-        redis.redis.info
-      else
-        redis.info
-      end
-    end
+    @redis_info ||= if redis.is_a?(Redis::Namespace)
+                      redis.redis.info
+                    else
+                      redis.info
+                    end
   end
 end

@@ -59,12 +59,10 @@ class Admin::Metrics::Dimension::SpaceUsageDimension < Admin::Metrics::Dimension::BaseDimension
   end
 
   def redis_info
-    @redis_info ||= begin
-      if redis.is_a?(Redis::Namespace)
-        redis.redis.info
-      else
-        redis.info
-      end
-    end
+    @redis_info ||= if redis.is_a?(Redis::Namespace)
+                      redis.redis.info
+                    else
+                      redis.info
+                    end
   end
 end

@@ -8,12 +8,10 @@ module Extractor
   module_function
 
   def extract_entities_with_indices(text, options = {}, &block)
-    entities = begin
-      extract_urls_with_indices(text, options) +
-        extract_hashtags_with_indices(text, check_url_overlap: false) +
-        extract_mentions_or_lists_with_indices(text) +
-        extract_extra_uris_with_indices(text)
-    end
+    entities = extract_urls_with_indices(text, options) +
+               extract_hashtags_with_indices(text, check_url_overlap: false) +
+               extract_mentions_or_lists_with_indices(text) +
+               extract_extra_uris_with_indices(text)
 
     return [] if entities.empty?
 
@@ -29,7 +27,7 @@ module Extractor
 
     text.scan(Account::MENTION_RE) do |screen_name, _|
       match_data = $LAST_MATCH_INFO
-      after = $'
+      after = ::Regexp.last_match.post_match
 
       unless Twitter::TwitterText::Regex[:end_mention_match].match?(after)
@@ -64,7 +62,7 @@ module Extractor
       match_data = $LAST_MATCH_INFO
       start_position = match_data.char_begin(1) - 1
       end_position = match_data.char_end(1)
-      after = $'
+      after = ::Regexp.last_match.post_match
 
       if %r{\A://}.match?(after)
         hash_text.match(/(.+)(https?\Z)/) do |matched|

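The two `after = $'` changes swap a Perl-style special global for its explicit spelling, which is what Style/SpecialGlobalVars enforces: `$'` is the text following the last match, and `::Regexp.last_match.post_match` reads the same data by name:

    'user@example.com' =~ /@/

    puts $'                              # => example.com (cryptic alias)
    puts ::Regexp.last_match.post_match  # => example.com (same value, explicit)
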
@@ -24,13 +24,11 @@ class Importer::StatusesIndexImporter < Importer::BaseImporter
         # is called before rendering the data and we need to filter based
         # on the results of the filter, so this filtering happens here instead
         bulk.map! do |entry|
-          new_entry = begin
-            if entry[:index] && entry.dig(:index, :data, 'searchable_by').blank?
-              { delete: entry[:index].except(:data) }
-            else
-              entry
-            end
-          end
+          new_entry = if entry[:index] && entry.dig(:index, :data, 'searchable_by').blank?
+                        { delete: entry[:index].except(:data) }
+                      else
+                        entry
+                      end
 
           if new_entry[:index]
             indexed += 1

@@ -232,26 +232,24 @@ class LinkDetailsExtractor
   end
 
   def structured_data
-    @structured_data ||= begin
-      # Some publications have more than one JSON-LD definition on the page,
-      # and some of those definitions aren't valid JSON either, so we have
-      # to loop through here until we find something that is the right type
-      # and doesn't break
-      document.xpath('//script[@type="application/ld+json"]').filter_map do |element|
-        json_ld = element.content&.gsub(CDATA_JUNK_PATTERN, '')
+    # Some publications have more than one JSON-LD definition on the page,
+    # and some of those definitions aren't valid JSON either, so we have
+    # to loop through here until we find something that is the right type
+    # and doesn't break
+    @structured_data ||= document.xpath('//script[@type="application/ld+json"]').filter_map do |element|
+      json_ld = element.content&.gsub(CDATA_JUNK_PATTERN, '')
 
-        next if json_ld.blank?
+      next if json_ld.blank?
 
-        structured_data = StructuredData.new(html_entities.decode(json_ld))
+      structured_data = StructuredData.new(html_entities.decode(json_ld))
 
-        next unless structured_data.valid?
+      next unless structured_data.valid?
 
-        structured_data
-      rescue Oj::ParseError, EncodingError
-        Rails.logger.debug { "Invalid JSON-LD in #{@original_url}" }
-        next
-      end.first
-    end
+      structured_data
+    rescue Oj::ParseError, EncodingError
+      Rails.logger.debug { "Invalid JSON-LD in #{@original_url}" }
+      next
+    end.first
   end
 
   def document

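This rewrite hoists the comment out of the deleted `begin` and memoizes the `filter_map` chain directly. The `rescue` stays attached to the block body, which Ruby has allowed since 2.5, so no `begin` is needed per iteration. A self-contained sketch of that shape, using the stdlib JSON parser in place of Oj:

    require 'json'

    documents = ['{"a":1}', 'not json', '{"b":2}']

    # filter_map drops nil results; the rescue applies per iteration
    parsed = documents.filter_map do |raw|
      JSON.parse(raw)
    rescue JSON::ParserError
      nil
    end

    p parsed # => [{"a"=>1}, {"b"=>2}]
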
@@ -48,7 +48,7 @@ class RateLimiter
     {
       'X-RateLimit-Limit' => @limit.to_s,
       'X-RateLimit-Remaining' => (@limit - (redis.get(key) || 0).to_i).to_s,
-      'X-RateLimit-Reset' => (now + (@period - now.to_i % @period)).iso8601(6),
+      'X-RateLimit-Reset' => (now + (@period - (now.to_i % @period))).iso8601(6),
     }
   end

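The RateLimiter change is presumably Lint/AmbiguousOperatorPrecedence: `%` already binds tighter than `-`, so the new parentheses change nothing at runtime, they only spell the precedence out. A quick check with arbitrary numbers:

    period = 300
    now = 1_676_800_000 # arbitrary epoch seconds

    a = period - now % period
    b = period - (now % period)
    raise 'mismatch' unless a == b

    puts "seconds until the window resets: #{b}" # => 200
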
@@ -215,26 +215,24 @@ class Request
         addr_by_socket = {}
 
         addresses.each do |address|
-          begin
-            check_private_address(address, host)
+          check_private_address(address, host)
 
-            sock     = ::Socket.new(address.is_a?(Resolv::IPv6) ? ::Socket::AF_INET6 : ::Socket::AF_INET, ::Socket::SOCK_STREAM, 0)
-            sockaddr = ::Socket.pack_sockaddr_in(port, address.to_s)
+          sock     = ::Socket.new(address.is_a?(Resolv::IPv6) ? ::Socket::AF_INET6 : ::Socket::AF_INET, ::Socket::SOCK_STREAM, 0)
+          sockaddr = ::Socket.pack_sockaddr_in(port, address.to_s)
 
-            sock.setsockopt(::Socket::IPPROTO_TCP, ::Socket::TCP_NODELAY, 1)
+          sock.setsockopt(::Socket::IPPROTO_TCP, ::Socket::TCP_NODELAY, 1)
 
-            sock.connect_nonblock(sockaddr)
+          sock.connect_nonblock(sockaddr)
 
-            # If that hasn't raised an exception, we somehow managed to connect
-            # immediately, close pending sockets and return immediately
-            socks.each(&:close)
-            return sock
-          rescue IO::WaitWritable
-            socks << sock
-            addr_by_socket[sock] = sockaddr
-          rescue => e
-            outer_e = e
-          end
+          # If that hasn't raised an exception, we somehow managed to connect
+          # immediately, close pending sockets and return immediately
+          socks.each(&:close)
+          return sock
+        rescue IO::WaitWritable
+          socks << sock
+          addr_by_socket[sock] = sockaddr
+        rescue => e
+          outer_e = e
         end
 
         until socks.empty?
@@ -279,9 +277,7 @@ class Request
       end
 
       def private_address_exceptions
-        @private_address_exceptions = begin
-          (ENV['ALLOWED_PRIVATE_ADDRESSES'] || '').split(',').map { |addr| IPAddr.new(addr) }
-        end
+        @private_address_exceptions = (ENV['ALLOWED_PRIVATE_ADDRESSES'] || '').split(',').map { |addr| IPAddr.new(addr) }
       end
     end
   end

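Both Request hunks are the Style/RedundantBegin treatment again. The first depends on block-level rescue, available since Ruby 2.5: a `do ... end` body may carry its own `rescue` clauses, making the inner `begin`/`end` redundant. A standalone sketch:

    errors = []

    [0, 1, 2].each do |n|
      raise ArgumentError, "odd: #{n}" if n.odd?
      puts "processed #{n}"
    rescue ArgumentError => e
      errors << e.message # the loop keeps going
    end

    p errors # => ["odd: 1"]
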
@@ -27,8 +27,6 @@ class StatusFinder
   end
 
   def verify_action!
-    unless recognized_params[:action] == 'show'
-      raise ActiveRecord::RecordNotFound
-    end
+    raise ActiveRecord::RecordNotFound unless recognized_params[:action] == 'show'
   end
 end

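Here a multi-line `unless` collapses into a modifier, in the spirit of Style/IfUnlessModifier, giving a one-line guard clause. Sketch with a stand-in exception class:

    def verify_action!(action)
      raise ArgumentError, 'not found' unless action == 'show'
    end

    verify_action!('show')      # passes silently
    # verify_action!('destroy') # would raise ArgumentError
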
@@ -29,7 +29,7 @@ class TranslationService::DeepL < TranslationService
 
   def request(text, source_language, target_language)
     req = Request.new(:post, endpoint_url, form: { text: text, source_lang: source_language&.upcase, target_lang: target_language, tag_handling: 'html' })
-    req.add_headers('Authorization': "DeepL-Auth-Key #{@api_key}")
+    req.add_headers(Authorization: "DeepL-Auth-Key #{@api_key}")
     req
   end

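The DeepL change looks like Style/QuotedSymbols: with `key:` hash syntax, `'Authorization':` already yields the plain symbol `:Authorization`, so the quotes are redundant. Both literals below build the same hash (placeholder key):

    api_key = 'abc123' # placeholder, not a real key

    a = { 'Authorization': "DeepL-Auth-Key #{api_key}" }
    b = { Authorization: "DeepL-Auth-Key #{api_key}" }

    raise 'differ' unless a == b
    p b.keys # => [:Authorization]
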

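The two Webfinger hunks below are plain Style/StringLiterals: single quotes for strings without interpolation. The literals are byte-for-byte identical either way; only the interpolated URLs keep double quotes:

    raise unless '.onion' == ".onion" # same string, either quoting

    domain = 'example.onion' # hypothetical domain
    scheme = domain.end_with?('.onion') ? 'http' : 'https'
    puts "#{scheme}://#{domain}/.well-known/host-meta"
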
@@ -99,7 +99,7 @@ class Webfinger
   end
 
   def standard_url
-    if @domain.end_with? ".onion"
+    if @domain.end_with? '.onion'
       "http://#{@domain}/.well-known/webfinger?resource=#{@uri}"
     else
       "https://#{@domain}/.well-known/webfinger?resource=#{@uri}"
@@ -107,7 +107,7 @@ class Webfinger
   end
 
   def host_meta_url
-    if @domain.end_with? ".onion"
+    if @domain.end_with? '.onion'
       "http://#{@domain}/.well-known/host-meta"
     else
       "https://#{@domain}/.well-known/host-meta"