Merge branch 'main' into glitch-soc/merge-upstream
20  lib/active_record/database_tasks_extensions.rb  Normal file
@@ -0,0 +1,20 @@
# frozen_string_literal: true

require_relative '../mastodon/snowflake'

module ActiveRecord
  module Tasks
    module DatabaseTasks
      original_load_schema = instance_method(:load_schema)

      define_method(:load_schema) do |db_config, *args|
        ActiveRecord::Base.establish_connection(db_config)
        Mastodon::Snowflake.define_timestamp_id

        original_load_schema.bind(self).call(db_config, *args)

        Mastodon::Snowflake.ensure_id_sequences_exist
      end
    end
  end
end
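The new file above wraps ActiveRecord::Tasks::DatabaseTasks.load_schema by capturing the original implementation as an unbound method and rebinding it inside a fresh definition. A minimal, self-contained sketch of that wrap-and-delegate pattern, with purely illustrative names that are not part of the diff:

# Illustrative only: Greeter and Person do not appear in the commit.
module Greeter
  def greet(name)
    "Hello, #{name}"
  end
end

module Greeter
  original_greet = instance_method(:greet) # capture an UnboundMethod

  define_method(:greet) do |name|
    # rebind the original onto the current receiver and delegate to it
    "#{original_greet.bind(self).call(name)}!"
  end
end

class Person
  include Greeter
end

puts Person.new.greet('Ada') # => "Hello, Ada!"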
23  lib/exceptions.rb  Normal file
@@ -0,0 +1,23 @@
# frozen_string_literal: true

module Mastodon
  class Error < StandardError; end
  class NotPermittedError < Error; end
  class ValidationError < Error; end
  class HostValidationError < ValidationError; end
  class LengthValidationError < ValidationError; end
  class DimensionsValidationError < ValidationError; end
  class StreamValidationError < ValidationError; end
  class RaceConditionError < Error; end
  class RateLimitExceededError < Error; end

  class UnexpectedResponseError < Error
    def initialize(response = nil)
      if response.respond_to? :uri
        super("#{response.uri} returned code #{response.code}")
      else
        super
      end
    end
  end
end
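A hedged usage sketch of the UnexpectedResponseError constructor defined above. Real callers pass an HTTP response object; the OpenStruct stand-in here is only for illustration:

require 'ostruct'

response = OpenStruct.new(uri: 'https://example.com/users/alice', code: 503)

begin
  raise Mastodon::UnexpectedResponseError, response
rescue Mastodon::Error => e
  puts e.message # => "https://example.com/users/alice returned code 503"
end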
@@ -14,7 +14,7 @@ module Mastodon
     end
 
     MIN_SUPPORTED_VERSION = 2019_10_01_213028
-    MAX_SUPPORTED_VERSION = 2020_12_18_054746
+    MAX_SUPPORTED_VERSION = 2021_03_08_133107
 
     # Stubs to enjoy ActiveRecord queries while not depending on a particular
     # version of the code/database
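The version constants above are ordinary Ruby integers: underscores in numeric literals are ignored, so they compare chronologically as plain migration timestamps. For example:

2021_03_08_133107 == 20210308133107   # => true
2021_03_08_133107 > 2020_12_18_054746 # => true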
@@ -142,7 +142,6 @@ module Mastodon
       @prompt.warn 'Please make sure to stop Mastodon and have a backup.'
       exit(1) unless @prompt.yes?('Continue?')
 
-      deduplicate_accounts!
       deduplicate_users!
       deduplicate_account_domain_blocks!
       deduplicate_account_identity_proofs!
@@ -157,6 +156,7 @@ module Mastodon
       deduplicate_media_attachments!
       deduplicate_preview_cards!
       deduplicate_statuses!
+      deduplicate_accounts!
       deduplicate_tags!
       deduplicate_webauthn_credentials!
 
@@ -41,42 +41,18 @@
 
 module Mastodon
   module MigrationHelpers
-    # Stub for Database.postgresql? from GitLab
-    def self.postgresql?
-      ActiveRecord::Base.configurations[Rails.env]['adapter'].casecmp('postgresql').zero?
-    end
-
-    # Stub for Database.mysql? from GitLab
-    def self.mysql?
-      ActiveRecord::Base.configurations[Rails.env]['adapter'].casecmp('mysql2').zero?
-    end
-
     # Model that can be used for querying permissions of a SQL user.
     class Grant < ActiveRecord::Base
-      self.table_name =
-        if Mastodon::MigrationHelpers.postgresql?
-          'information_schema.role_table_grants'
-        else
-          'mysql.user'
-        end
+      self.table_name = 'information_schema.role_table_grants'
 
       def self.scope_to_current_user
-        if Mastodon::MigrationHelpers.postgresql?
-          where('grantee = user')
-        else
-          where("CONCAT(User, '@', Host) = current_user()")
-        end
+        where('grantee = user')
      end
 
       # Returns true if the current user can create and execute triggers on the
       # given table.
      def self.create_and_execute_trigger?(table)
-        priv =
-          if Mastodon::MigrationHelpers.postgresql?
-            where(privilege_type: 'TRIGGER', table_name: table)
-          else
-            where(Trigger_priv: 'Y')
-          end
+        priv = where(privilege_type: 'TRIGGER', table_name: table)
 
        priv.scope_to_current_user.any?
      end
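With the MySQL branches removed, the Grant model above always reads PostgreSQL's information_schema.role_table_grants. A hedged sketch of the check it performs (the table name is illustrative, and this assumes an established database connection):

Mastodon::MigrationHelpers::Grant
  .where(privilege_type: 'TRIGGER', table_name: 'accounts')
  .scope_to_current_user
  .any?
# => true when the connected role may create and execute triggers on "accounts"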
@@ -141,10 +117,8 @@ module Mastodon
           'in the body of your migration class'
       end
 
-      if MigrationHelpers.postgresql?
-        options = options.merge({ algorithm: :concurrently })
-        disable_statement_timeout
-      end
+      options = options.merge({ algorithm: :concurrently })
+      disable_statement_timeout
 
       add_index(table_name, column_name, options)
     end
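For context, the hunk above sits inside the GitLab-derived concurrent index helper, which refuses to run inside a DDL transaction (hence the 'in the body of your migration class' message). A hypothetical migration using it might look like this; the class, table, and column names are made up for illustration:

# Hypothetical migration; names are illustrative only.
class AddExampleIndexToStatuses < ActiveRecord::Migration[5.2]
  include Mastodon::MigrationHelpers

  disable_ddl_transaction!

  def change
    add_concurrent_index :statuses, :example_column
  end
end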
@@ -199,8 +173,6 @@ module Mastodon
 
     # Only available on Postgresql >= 9.2
     def supports_drop_index_concurrently?
-      return false unless MigrationHelpers.postgresql?
-
       version = select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i
 
       version >= 90200
@@ -226,13 +198,7 @@ module Mastodon
       # While MySQL does allow disabling of foreign keys it has no equivalent
       # of PostgreSQL's "VALIDATE CONSTRAINT". As a result we'll just fall
       # back to the normal foreign key procedure.
-      if MigrationHelpers.mysql?
-        return add_foreign_key(source, target,
-                               column: column,
-                               on_delete: on_delete)
-      else
-        on_delete = 'SET NULL' if on_delete == :nullify
-      end
+      on_delete = 'SET NULL' if on_delete == :nullify
 
       disable_statement_timeout
 
@@ -270,7 +236,7 @@ module Mastodon
     # the database. Disable the session's statement timeout to ensure
     # migrations don't get killed prematurely. (PostgreSQL only)
     def disable_statement_timeout
-      execute('SET statement_timeout TO 0') if MigrationHelpers.postgresql?
+      execute('SET statement_timeout TO 0')
     end
 
     # Updates the value of a column in batches.
@@ -319,7 +285,7 @@ module Mastodon
       count_arel = table.project(Arel.star.count.as('count'))
       count_arel = yield table, count_arel if block_given?
 
-      total = exec_query(count_arel.to_sql).to_hash.first['count'].to_i
+      total = exec_query(count_arel.to_sql).to_ary.first['count'].to_i
 
       return if total == 0
     end
@@ -335,7 +301,7 @@ module Mastodon
 
       start_arel = table.project(table[:id]).order(table[:id].asc).take(1)
       start_arel = yield table, start_arel if block_given?
-      first_row = exec_query(start_arel.to_sql).to_hash.first
+      first_row = exec_query(start_arel.to_sql).to_ary.first
       # In case there are no rows but we didn't catch it in the estimated size:
       return unless first_row
       start_id = first_row['id'].to_i
@@ -356,7 +322,7 @@ module Mastodon
         .skip(batch_size)
 
       stop_arel = yield table, stop_arel if block_given?
-      stop_row = exec_query(stop_arel.to_sql).to_hash.first
+      stop_row = exec_query(stop_arel.to_sql).to_ary.first
 
       update_arel = Arel::UpdateManager.new
         .table(table)
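The three `.to_hash` → `.to_ary` changes above follow the ActiveRecord::Result API: `#to_hash` was deprecated in newer Rails in favour of `#to_a`/`#to_ary`, and both return an array of row hashes, so the surrounding logic stays the same. A small sketch, assuming an established connection:

result = ActiveRecord::Base.connection.exec_query('SELECT 1 AS count')
result.to_ary                      # => [{"count" => 1}]
result.to_ary.first['count'].to_i  # => 1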
@@ -487,11 +453,7 @@ module Mastodon
       # If we were in the middle of update_column_in_batches, we should remove
       # the old column and start over, as we have no idea where we were.
       if column_for(table, new)
-        if MigrationHelpers.postgresql?
-          remove_rename_triggers_for_postgresql(table, trigger_name)
-        else
-          remove_rename_triggers_for_mysql(trigger_name)
-        end
+        remove_rename_triggers_for_postgresql(table, trigger_name)
 
         remove_column(table, new)
       end
@@ -521,13 +483,8 @@ module Mastodon
       quoted_old = quote_column_name(old)
       quoted_new = quote_column_name(new)
 
-      if MigrationHelpers.postgresql?
-        install_rename_triggers_for_postgresql(trigger_name, quoted_table,
-                                               quoted_old, quoted_new)
-      else
-        install_rename_triggers_for_mysql(trigger_name, quoted_table,
-                                          quoted_old, quoted_new)
-      end
+      install_rename_triggers_for_postgresql(trigger_name, quoted_table,
+                                             quoted_old, quoted_new)
 
       update_column_in_batches(table, new, Arel::Table.new(table)[old])
 
@@ -685,11 +642,7 @@ module Mastodon
 
       check_trigger_permissions!(table)
 
-      if MigrationHelpers.postgresql?
-        remove_rename_triggers_for_postgresql(table, trigger_name)
-      else
-        remove_rename_triggers_for_mysql(trigger_name)
-      end
+      remove_rename_triggers_for_postgresql(table, trigger_name)
 
       remove_column(table, old)
     end
@@ -844,18 +797,9 @@ module Mastodon
       quoted_pattern = Arel::Nodes::Quoted.new(pattern.to_s)
       quoted_replacement = Arel::Nodes::Quoted.new(replacement.to_s)
 
-      if MigrationHelpers.mysql?
-        locate = Arel::Nodes::NamedFunction
-          .new('locate', [quoted_pattern, column])
-        insert_in_place = Arel::Nodes::NamedFunction
-          .new('insert', [column, locate, pattern.size, quoted_replacement])
-
-        Arel::Nodes::SqlLiteral.new(insert_in_place.to_sql)
-      else
-        replace = Arel::Nodes::NamedFunction
-          .new("regexp_replace", [column, quoted_pattern, quoted_replacement])
-        Arel::Nodes::SqlLiteral.new(replace.to_sql)
-      end
+      replace = Arel::Nodes::NamedFunction
+        .new("regexp_replace", [column, quoted_pattern, quoted_replacement])
+      Arel::Nodes::SqlLiteral.new(replace.to_sql)
     end
 
     def remove_foreign_key_without_error(*args)
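The helper above now always builds a PostgreSQL regexp_replace expression. A hedged sketch of what the Arel nodes render to, run from a Rails console with a live connection (table, column, and values are illustrative, and the exact quoting depends on the adapter):

column      = Arel::Table.new(:accounts)[:uri]
pattern     = Arel::Nodes::Quoted.new('http:')
replacement = Arel::Nodes::Quoted.new('https:')

Arel::Nodes::NamedFunction.new('regexp_replace', [column, pattern, replacement]).to_sql
# => roughly: regexp_replace("accounts"."uri", 'http:', 'https:')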
@@ -27,6 +27,8 @@ namespace = ENV.fetch('REDIS_NAMESPACE', nil)
 cache_namespace = namespace ? namespace + '_cache' : 'cache'
 
 REDIS_CACHE_PARAMS = {
+  driver: :hiredis,
+  url: ENV['REDIS_URL'],
   expires_in: 10.minutes,
   namespace: cache_namespace,
 }.freeze
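A hedged sketch of how a Rails app consumes a hash like REDIS_CACHE_PARAMS (Mastodon wires it up in its production environment config); the :hiredis driver additionally assumes the hiredis gem is in the bundle:

# config/environments/production.rb (sketch)
Rails.application.configure do
  config.cache_store = :redis_cache_store, REDIS_CACHE_PARAMS
end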
139  lib/sanitize_ext/sanitize_config.rb  Normal file
@@ -0,0 +1,139 @@
# frozen_string_literal: true

class Sanitize
  module Config
    HTTP_PROTOCOLS = %w(
      http
      https
    ).freeze

    LINK_PROTOCOLS = %w(
      http
      https
      dat
      dweb
      ipfs
      ipns
      ssb
      gopher
      xmpp
      magnet
      gemini
    ).freeze

    CLASS_WHITELIST_TRANSFORMER = lambda do |env|
      node = env[:node]
      class_list = node['class']&.split(/[\t\n\f\r ]/)

      return unless class_list

      class_list.keep_if do |e|
        next true if /^(h|p|u|dt|e)-/.match?(e) # microformats classes
        next true if /^(mention|hashtag)$/.match?(e) # semantic classes
        next true if /^(ellipsis|invisible)$/.match?(e) # link formatting classes
      end

      node['class'] = class_list.join(' ')
    end

    IMG_TAG_TRANSFORMER = lambda do |env|
      node = env[:node]

      return unless env[:node_name] == 'img'

      node.name = 'a'

      node['href'] = node['src']
      if node['alt'].present?
        node.content = "[🖼 #{node['alt']}]"
      else
        url = node['href']
        prefix = url.match(/\Ahttps?:\/\/(www\.)?/).to_s
        text = url[prefix.length, 30]
        text = text + "…" if url[prefix.length..-1].length > 30
        node.content = "[🖼 #{text}]"
      end
    end

    LINK_REL_TRANSFORMER = lambda do |env|
      return unless env[:node_name] == 'a' and env[:node]['href']

      node = env[:node]

      rel = (node['rel'] || '').split(' ') & ['tag']
      unless env[:config][:outgoing] && TagManager.instance.local_url?(node['href'])
        rel += ['nofollow', 'noopener', 'noreferrer']
      end
      node['rel'] = rel.join(' ')
    end

    UNSUPPORTED_HREF_TRANSFORMER = lambda do |env|
      return unless env[:node_name] == 'a'

      current_node = env[:node]

      scheme = begin
        if current_node['href'] =~ Sanitize::REGEX_PROTOCOL
          Regexp.last_match(1).downcase
        else
          :relative
        end
      end

      current_node.replace(current_node.text) unless LINK_PROTOCOLS.include?(scheme)
    end

    MASTODON_STRICT ||= freeze_config(
      elements: %w(p br span a abbr del pre blockquote code b strong u sub sup i em h1 h2 h3 h4 h5 ul ol li),

      attributes: {
        'a' => %w(href rel class title),
        'span' => %w(class),
        'abbr' => %w(title),
        'blockquote' => %w(cite),
        'ol' => %w(start reversed),
        'li' => %w(value),
      },

      add_attributes: {
        'a' => {
          'target' => '_blank',
        },
      },

      protocols: {
        'a' => { 'href' => LINK_PROTOCOLS },
        'blockquote' => { 'cite' => LINK_PROTOCOLS },
      },

      transformers: [
        CLASS_WHITELIST_TRANSFORMER,
        IMG_TAG_TRANSFORMER,
        UNSUPPORTED_HREF_TRANSFORMER,
        LINK_REL_TRANSFORMER,
      ]
    )

    MASTODON_OEMBED ||= freeze_config merge(
      RELAXED,
      elements: RELAXED[:elements] + %w(audio embed iframe source video),

      attributes: merge(
        RELAXED[:attributes],
        'audio' => %w(controls),
        'embed' => %w(height src type width),
        'iframe' => %w(allowfullscreen frameborder height scrolling src width),
        'source' => %w(src type),
        'video' => %w(controls height loop width),
        'div' => [:data]
      ),

      protocols: merge(
        RELAXED[:protocols],
        'embed' => { 'src' => HTTP_PROTOCOLS },
        'iframe' => { 'src' => HTTP_PROTOCOLS },
        'source' => { 'src' => HTTP_PROTOCOLS }
      )
    )
  end
end
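A hedged usage sketch of the strict config above, assuming the sanitize gem and this file are loaded; the input HTML is illustrative:

html = '<p class="mention custom">Hi <a href="javascript:alert(1)">friend</a></p>'

Sanitize.fragment(html, Sanitize::Config::MASTODON_STRICT)
# => roughly '<p class="mention">Hi friend</p>'
# the javascript: link is reduced to its text and the unknown class is dropped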
@@ -1,36 +1,5 @@
 # frozen_string_literal: true
 
-require_relative '../mastodon/snowflake'
-
-def each_schema_load_environment
-  # If we're in development, also run this for the test environment.
-  # This is a somewhat hacky way to do this, so here's why:
-  # 1. We have to define this before we load the schema, or we won't
-  #    have a timestamp_id function when we get to it in the schema.
-  # 2. db:setup calls db:schema:load_if_ruby, which calls
-  #    db:schema:load, which we define above as having a prerequisite
-  #    of this task.
-  # 3. db:schema:load ends up running
-  #    ActiveRecord::Tasks::DatabaseTasks.load_schema_current, which
-  #    calls a private method `each_current_configuration`, which
-  #    explicitly also does the loading for the `test` environment
-  #    if the current environment is `development`, so we end up
-  #    needing to do the same, and we can't even use the same method
-  #    to do it.
-
-  if Rails.env.development?
-    test_conf = ActiveRecord::Base.configurations['test']
-
-    if test_conf['database']&.present?
-      ActiveRecord::Base.establish_connection(:test)
-      yield
-      ActiveRecord::Base.establish_connection(Rails.env.to_sym)
-    end
-  end
-
-  yield
-end
-
 namespace :db do
   namespace :migrate do
     desc 'Setup the db or migrate depending on state of db'
@@ -50,7 +19,7 @@ namespace :db do
 
   task :post_migration_hook do
     at_exit do
-      unless %w(C POSIX).include?(ActiveRecord::Base.connection.execute('SELECT datcollate FROM pg_database WHERE datname = current_database();').first['datcollate'])
+      unless %w(C POSIX).include?(ActiveRecord::Base.connection.select_one('SELECT datcollate FROM pg_database WHERE datname = current_database();')['datcollate'])
         warn <<~WARNING
           Your database collation is susceptible to index corruption.
           (This warning does not indicate that index corruption has occured and can be ignored)
@@ -60,30 +29,11 @@ namespace :db do
       end
     end
 
+  task :pre_migration_check do
+    version = ActiveRecord::Base.connection.select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i
+    abort 'ERROR: This version of Mastodon requires PostgreSQL 9.5 or newer. Please update PostgreSQL before updating Mastodon.' if version < 90_500
+  end
+
+  Rake::Task['db:migrate'].enhance(['db:pre_migration_check'])
   Rake::Task['db:migrate'].enhance(['db:post_migration_hook'])
-
-  # Before we load the schema, define the timestamp_id function.
-  # Idiomatically, we might do this in a migration, but then it
-  # wouldn't end up in schema.rb, so we'd need to figure out a way to
-  # get it in before doing db:setup as well. This is simpler, and
-  # ensures it's always in place.
-  Rake::Task['db:schema:load'].enhance ['db:define_timestamp_id']
-
-  # After we load the schema, make sure we have sequences for each
-  # table using timestamp IDs.
-  Rake::Task['db:schema:load'].enhance do
-    Rake::Task['db:ensure_id_sequences_exist'].invoke
-  end
-
-  task :define_timestamp_id do
-    each_schema_load_environment do
-      Mastodon::Snowflake.define_timestamp_id
-    end
-  end
-
-  task :ensure_id_sequences_exist do
-    each_schema_load_environment do
-      Mastodon::Snowflake.ensure_id_sequences_exist
-    end
-  end
 end
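The Rake::Task#enhance(prerequisites) calls above make the check and hook tasks run around db:migrate. A toy Rakefile sketch of the same mechanism, with illustrative task names:

# Rakefile (toy example)
task :preflight do
  puts 'preflight check'
end

task :deploy do
  puts 'deploying'
end

Rake::Task[:deploy].enhance([:preflight])
# `rake deploy` now prints "preflight check" before "deploying"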