Merge tag 'v4.2.0-beta2'

bgme 2023-08-22 03:51:20 +08:00
commit 9e38d55101
3010 changed files with 81215 additions and 55173 deletions

View file

@ -29,7 +29,7 @@ module ActiveRecord
if flatten
yield record[1]
else
yield record[1..-1]
yield record[1..]
end
end
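The only change here is syntactic: `record[1..]` is the endless-range form introduced in Ruby 2.6 and returns the same slice as `record[1..-1]`. A quick illustration with made-up values:

record = ['id', 'title', 'body']
record[1..-1] # => ["title", "body"]
record[1..]   # => ["title", "body"] (endless range, Ruby >= 2.6)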

View file

@ -1 +1,3 @@
use { color: #000 !important; }
use {
color: #000 !important;
}

View file

@ -0,0 +1,18 @@
# frozen_string_literal: true
module Chewy
module IndexExtensions
def index_preset(base_options = {})
case ENV['ES_PRESET'].presence
when 'single_node_cluster', nil
base_options.merge(number_of_replicas: 0)
when 'small_cluster'
base_options.merge(number_of_replicas: 1)
when 'large_cluster'
base_options.merge(number_of_replicas: 1, number_of_shards: (base_options[:number_of_shards] || 1) * 2)
end
end
end
end
Chewy::Index.extend(Chewy::IndexExtensions)
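For context, an index class can call the new helper when declaring its settings; a minimal sketch assuming a hypothetical AccountsIndex (the refresh_interval value is illustrative, not taken from this diff):

class AccountsIndex < Chewy::Index
  # number_of_replicas / number_of_shards are filled in according to ES_PRESET
  settings index: index_preset(refresh_interval: '30s')
end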

View file

@ -0,0 +1,11 @@
# frozen_string_literal: true
module Chewy
module SettingsExtensions
def enabled?
settings[:enabled]
end
end
end
Chewy.extend(Chewy::SettingsExtensions)
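Callers can then guard Elasticsearch work on the `enabled` flag from the Chewy settings; a minimal sketch (the index name and call site are illustrative):

# e.g. inside a hypothetical indexing job
AccountsIndex.import(account.id) if Chewy.enabled?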

View file

@ -0,0 +1,10 @@
Description:
Generate a Rails migration in the db/post_migrate/ dir.
Interacts with the post_deployment_migrations initializer.
Example:
bin/rails generate post_deployment_migration IsolateChanges
Creates a migration in db/post_migrate/<timestamp>_isolate_changes.rb
which will include `disable_ddl_transaction!` and an empty `change` method.

View file

@ -0,0 +1,17 @@
# frozen_string_literal: true
require 'rails/generators/active_record'
class PostDeploymentMigrationGenerator < Rails::Generators::NamedBase
source_root File.expand_path('templates', __dir__)
include Rails::Generators::Migration
def create_post_deployment_migration
migration_template 'migration.erb', "db/post_migrate/#{file_name}.rb"
end
def self.next_migration_number(path)
ActiveRecord::Generators::Base.next_migration_number(path)
end
end

View file

@ -0,0 +1,8 @@
# frozen_string_literal: true
class <%= migration_class_name %> < ActiveRecord::Migration[<%= ActiveRecord::Migration.current_version %>]
disable_ddl_transaction!
def change
end
end
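Put together, running the example command from the USAGE text above should produce a file along these lines (the timestamp and Rails version shown here are illustrative):

# db/post_migrate/20230822000000_isolate_changes.rb
class IsolateChanges < ActiveRecord::Migration[7.0]
  disable_ddl_transaction!

  def change
  end
end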

View file

@ -1,17 +0,0 @@
# frozen_string_literal: true
require 'rails/generators'
module Rails
class PostDeploymentMigrationGenerator < Rails::Generators::NamedBase
def create_migration_file
timestamp = Time.zone.now.strftime('%Y%m%d%H%M%S')
template 'migration.rb', "db/post_migrate/#{timestamp}_#{file_name}.rb"
end
def migration_class_name
file_name.camelize
end
end
end

lib/http_extensions.rb (new file, 8 additions)
View file

@ -0,0 +1,8 @@
# frozen_string_literal: true
# Monkey patching until https://github.com/httprb/http/pull/757 is merged
unless HTTP::Request::METHODS.include?(:purge)
methods = HTTP::Request::METHODS.dup
HTTP::Request.send(:remove_const, :METHODS)
HTTP::Request.const_set(:METHODS, methods.push(:purge).freeze)
end
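With `:purge` registered, the underlying http gem accepts it like any other verb; a minimal sketch with an illustrative URL (Mastodon itself goes through its own Request wrapper rather than calling the gem directly):

require 'http'
require_relative 'http_extensions'

# Chainable#request works with any verb present in HTTP::Request::METHODS
HTTP.request(:purge, 'https://cdn.example.com/media/1/original.png')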

View file

@ -0,0 +1,26 @@
# frozen_string_literal: true
module HamlLint
# Bans the usage of “•” (bullet) in HTML/HAML in favor of “·” (middle dot) in anything that will end up as a text node (including string literals in Ruby code).
class Linter::MiddleDot < Linter
include LinterRegistry
# rubocop:disable Style/MiddleDot
BULLET = '•'
# rubocop:enable Style/MiddleDot
MIDDLE_DOT = '·'
MESSAGE = "Use '#{MIDDLE_DOT}' (middle dot) instead of '#{BULLET}' (bullet)".freeze
def visit_plain(node)
return unless node.text.include?(BULLET)
record_lint(node, MESSAGE)
end
def visit_script(node)
return unless node.script.include?(BULLET)
record_lint(node, MESSAGE)
end
end
end

View file

@ -0,0 +1,31 @@
# frozen_string_literal: true
module RuboCop
module Cop
module Style
# Bans the usage of “•” (bullet) in HTML/HAML in favor of “·” (middle dot) in string literals
class MiddleDot < Base
extend AutoCorrector
extend Util
# rubocop:disable Style/MiddleDot
BULLET = '•'
# rubocop:enable Style/MiddleDot
MIDDLE_DOT = '·'
MESSAGE = "Use '#{MIDDLE_DOT}' (middle dot) instead of '#{BULLET}' (bullet)".freeze
def on_str(node)
# Constants like __FILE__ are handled as strings,
# but don't respond to begin.
return unless node.loc.respond_to?(:begin) && node.loc.begin
return unless node.value.include?(BULLET)
add_offense(node, message: MESSAGE) do |corrector|
corrector.replace(node, node.source.gsub(BULLET, MIDDLE_DOT))
end
end
end
end
end
end
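What the cop flags and how its autocorrector rewrites it, illustrated on a throwaway string:

label = 'Boosts • Favourites'   # offense: Use '·' (middle dot) instead of '•' (bullet)
label = 'Boosts · Favourites'   # after autocorrection (gsub of the bullet)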

View file

@ -1,60 +0,0 @@
# frozen_string_literal: true
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class CacheCLI < Thor
include CLIHelper
def self.exit_on_failure?
true
end
desc 'clear', 'Clear out the cache storage'
def clear
Rails.cache.clear
say('OK', :green)
end
option :concurrency, type: :numeric, default: 5, aliases: [:c]
option :verbose, type: :boolean, aliases: [:v]
desc 'recount TYPE', 'Update hard-cached counters'
long_desc <<~LONG_DESC
Update hard-cached counters of TYPE by counting referenced
records from scratch. TYPE can be "accounts" or "statuses".
It may take a very long time to finish, depending on the
size of the database.
LONG_DESC
def recount(type)
case type
when 'accounts'
processed, = parallelize_with_progress(Account.local.includes(:account_stat)) do |account|
account_stat = account.account_stat
account_stat.following_count = account.active_relationships.count
account_stat.followers_count = account.passive_relationships.count
account_stat.statuses_count = account.statuses.where.not(visibility: :direct).count
account_stat.save if account_stat.changed?
end
when 'statuses'
processed, = parallelize_with_progress(Status.includes(:status_stat)) do |status|
status_stat = status.status_stat
status_stat.replies_count = status.replies.where.not(visibility: :direct).count
status_stat.reblogs_count = status.reblogs.count
status_stat.favourites_count = status.favourites.count
status_stat.save if status_stat.changed?
end
else
say("Unknown type: #{type}", :red)
exit(1)
end
say
say("OK, recounted #{processed} records", :green)
end
end
end

View file

@ -1,18 +1,10 @@
# frozen_string_literal: true
require 'set'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class AccountsCLI < Thor
include CLIHelper
def self.exit_on_failure?
true
end
require_relative 'base'
module Mastodon::CLI
class Accounts < Base
option :all, type: :boolean
desc 'rotate [USERNAME]', 'Generate and broadcast new keys'
long_desc <<-LONG_DESC
@ -57,6 +49,7 @@ module Mastodon
option :role
option :reattach, type: :boolean
option :force, type: :boolean
option :approve, type: :boolean
desc 'create USERNAME', 'Create a new user account'
long_desc <<-LONG_DESC
Create a new user account with a given USERNAME and an
@ -72,6 +65,8 @@ module Mastodon
account is still in use by someone else, you can supply
the --force option to delete the old record and reattach the
username to the new account anyway.
With the --approve option, the account will be approved.
LONG_DESC
def create(username)
role_id = nil
@ -89,7 +84,7 @@ module Mastodon
account = Account.new(username: username)
password = SecureRandom.hex
user = User.new(email: options[:email], password: password, agreement: true, approved: true, role_id: role_id, confirmed_at: options[:confirmed] ? Time.now.utc : nil, bypass_invite_request_check: true)
user = User.new(email: options[:email], password: password, agreement: true, role_id: role_id, confirmed_at: options[:confirmed] ? Time.now.utc : nil, bypass_invite_request_check: true)
if options[:reattach]
account = Account.find_local(username) || Account.new(username: username)
@ -99,7 +94,8 @@ module Mastodon
say('Use --force to reattach it anyway and delete the other user')
return
elsif account.user.present?
DeleteAccountService.new.call(account, reserve_email: false)
DeleteAccountService.new.call(account, reserve_email: false, reserve_username: false)
account = Account.new(username: username)
end
end
@ -112,15 +108,12 @@ module Mastodon
user.confirm!
end
user.approve! if options[:approve]
say('OK', :green)
say("New password: #{password}")
else
user.errors.to_h.each do |key, error|
say('Failure/Error: ', :red)
say(key)
say(" #{error}", :red)
end
report_errors(user.errors)
exit(1)
end
end
@ -184,18 +177,14 @@ module Mastodon
user.disabled = true if options[:disable]
user.approved = true if options[:approve]
user.otp_required_for_login = false if options[:disable_2fa]
user.confirm if options[:confirm]
if user.save
user.confirm if options[:confirm]
say('OK', :green)
say("New password: #{password}") if options[:reset_password]
else
user.errors.to_h.each do |key, error|
say('Failure/Error: ', :red)
say(key)
say(" #{error}", :red)
end
report_errors(user.errors)
exit(1)
end
end
@ -218,7 +207,6 @@ module Mastodon
exit(1)
end
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
account = nil
if username.present?
@ -235,9 +223,9 @@ module Mastodon
end
end
say("Deleting user with #{account.statuses_count} statuses, this might take a while...#{dry_run}")
DeleteAccountService.new.call(account, reserve_email: false) unless options[:dry_run]
say("OK#{dry_run}", :green)
say("Deleting user with #{account.statuses_count} statuses, this might take a while...#{dry_run_mode_suffix}")
DeleteAccountService.new.call(account, reserve_email: false) unless dry_run?
say("OK#{dry_run_mode_suffix}", :green)
end
option :force, type: :boolean, aliases: [:f], description: 'Override public key check'
@ -292,7 +280,7 @@ module Mastodon
Account.remote.select(:uri, 'count(*)').group(:uri).having('count(*) > 1').pluck(:uri).each do |uri|
say("Duplicates found for #{uri}")
begin
ActivityPub::FetchRemoteAccountService.new.call(uri) unless options[:dry_run]
ActivityPub::FetchRemoteAccountService.new.call(uri) unless dry_run?
rescue => e
say("Error processing #{uri}: #{e}", :red)
end
@ -333,7 +321,6 @@ module Mastodon
LONG_DESC
def cull(*domains)
skip_threshold = 7.days.ago
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
skip_domains = Concurrent::Set.new
query = Account.remote.where(protocol: :activitypub)
@ -346,12 +333,12 @@ module Mastodon
begin
code = Request.new(:head, account.uri).perform(&:code)
rescue HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError
rescue HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError, Mastodon::PrivateNetworkAddressError
skip_domains << account.domain
end
if [404, 410].include?(code)
DeleteAccountService.new.call(account, reserve_username: false) unless options[:dry_run]
DeleteAccountService.new.call(account, reserve_username: false) unless dry_run?
1
else
# Touch account even during dry run to avoid getting the account into the window again
@ -359,7 +346,7 @@ module Mastodon
end
end
say("Visited #{processed} accounts, removed #{culled}#{dry_run}", :green)
say("Visited #{processed} accounts, removed #{culled}#{dry_run_mode_suffix}", :green)
unless skip_domains.empty?
say('The following domains were not available during the check:', :yellow)
@ -382,21 +369,19 @@ module Mastodon
specified with space-separated USERNAMES.
LONG_DESC
def refresh(*usernames)
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
if options[:domain] || options[:all]
scope = Account.remote
scope = scope.where(domain: options[:domain]) if options[:domain]
processed, = parallelize_with_progress(scope) do |account|
next if options[:dry_run]
next if dry_run?
account.reset_avatar!
account.reset_header!
account.save
end
say("Refreshed #{processed} accounts#{dry_run}", :green, true)
say("Refreshed #{processed} accounts#{dry_run_mode_suffix}", :green, true)
elsif !usernames.empty?
usernames.each do |user|
user, domain = user.split('@')
@ -407,7 +392,7 @@ module Mastodon
exit(1)
end
next if options[:dry_run]
next if dry_run?
begin
account.reset_avatar!
@ -418,7 +403,7 @@ module Mastodon
end
end
say("OK#{dry_run}", :green)
say("OK#{dry_run_mode_suffix}", :green)
else
say('No account(s) given', :red)
exit(1)
@ -496,14 +481,12 @@ module Mastodon
scope = Account.where(id: ::Follow.where(account: account).select(:target_account_id))
scope.find_each do |target_account|
begin
UnfollowService.new.call(account, target_account)
rescue => e
progress.log pastel.red("Error processing #{target_account.id}: #{e}")
ensure
progress.increment
processed += 1
end
UnfollowService.new.call(account, target_account)
rescue => e
progress.log pastel.red("Error processing #{target_account.id}: #{e}")
ensure
progress.increment
processed += 1
end
BootstrapTimelineWorker.perform_async(account.id)
@ -513,14 +496,12 @@ module Mastodon
scope = Account.where(id: ::Follow.where(target_account: account).select(:account_id))
scope.find_each do |target_account|
begin
UnfollowService.new.call(target_account, account)
rescue => e
progress.log pastel.red("Error processing #{target_account.id}: #{e}")
ensure
progress.increment
processed += 1
end
UnfollowService.new.call(target_account, account)
rescue => e
progress.log pastel.red("Error processing #{target_account.id}: #{e}")
ensure
progress.increment
processed += 1
end
end
@ -541,7 +522,7 @@ module Mastodon
if options[:all]
User.pending.find_each(&:approve!)
say('OK', :green)
elsif options[:number]
elsif options[:number]&.positive?
User.pending.order(created_at: :asc).limit(options[:number]).each(&:approve!)
say('OK', :green)
elsif username.present?
@ -555,6 +536,7 @@ module Mastodon
account.user&.approve!
say('OK', :green)
else
say('Number must be positive', :red) if options[:number]
exit(1)
end
end
@ -572,8 +554,6 @@ module Mastodon
- not muted/blocked by us
LONG_DESC
def prune
dry_run = options[:dry_run] ? ' (dry run)' : ''
query = Account.remote.where.not(actor_type: %i(Application Service))
query = query.where('NOT EXISTS (SELECT 1 FROM mentions WHERE account_id = accounts.id)')
query = query.where('NOT EXISTS (SELECT 1 FROM favourites WHERE account_id = accounts.id)')
@ -589,11 +569,11 @@ module Mastodon
next if account.suspended?
next if account.silenced?
account.destroy unless options[:dry_run]
account.destroy unless dry_run?
1
end
say("OK, pruned #{deleted} accounts#{dry_run}", :green)
say("OK, pruned #{deleted} accounts#{dry_run_mode_suffix}", :green)
end
option :force, type: :boolean
@ -671,6 +651,14 @@ module Mastodon
private
def report_errors(errors)
errors.each do |error|
say('Failure/Error: ', :red)
say(error.attribute)
say(" #{error.type}", :red)
end
end
def rotate_keys_for_account(account, delay = 0)
if account.nil?
say('No such account', :red)
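The new `report_errors` helper assumes the Rails 6.1+ `ActiveModel::Errors` API, where iteration yields error objects rather than `[attribute, message]` pairs:

user.errors.each do |error|
  error.attribute # => e.g. :email
  error.type      # => e.g. :taken
end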

lib/mastodon/cli/base.rb (new file, 42 additions)
View file

@ -0,0 +1,42 @@
# frozen_string_literal: true
require_relative '../../../config/boot'
require_relative '../../../config/environment'
require 'thor'
require_relative 'progress_helper'
module Mastodon
module CLI
class Base < Thor
include ProgressHelper
def self.exit_on_failure?
true
end
private
def pastel
@pastel ||= Pastel.new
end
def dry_run?
options[:dry_run]
end
def dry_run_mode_suffix
dry_run? ? ' (DRY RUN)' : ''
end
def reset_connection_pools!
ActiveRecord::Base.establish_connection(
ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).first.configuration_hash
.dup
.tap { |config| config['pool'] = options[:concurrency] + 1 }
)
RedisConfiguration.establish_pool(options[:concurrency])
end
end
end
end
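A sketch of how a subcommand reads on top of the new base class; the command name and model below are placeholders, not part of this diff:

module Mastodon::CLI
  class Example < Base
    option :concurrency, type: :numeric, default: 5, aliases: [:c]
    option :dry_run, type: :boolean
    desc 'prune', 'Illustrative command built on the shared helpers'
    def prune
      processed, = parallelize_with_progress(SomeModel.all) do |record| # SomeModel is hypothetical
        record.destroy unless dry_run?
      end
      say("Pruned #{processed} records#{dry_run_mode_suffix}", :green)
    end
  end
end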

lib/mastodon/cli/cache.rb (new file, 72 additions)
View file

@ -0,0 +1,72 @@
# frozen_string_literal: true
require_relative 'base'
module Mastodon::CLI
class Cache < Base
desc 'clear', 'Clear out the cache storage'
def clear
Rails.cache.clear
say('OK', :green)
end
option :concurrency, type: :numeric, default: 5, aliases: [:c]
option :verbose, type: :boolean, aliases: [:v]
desc 'recount TYPE', 'Update hard-cached counters'
long_desc <<~LONG_DESC
Update hard-cached counters of TYPE by counting referenced
records from scratch. TYPE can be "accounts" or "statuses".
It may take a very long time to finish, depending on the
size of the database.
LONG_DESC
def recount(type)
case type
when 'accounts'
processed, = parallelize_with_progress(accounts_with_stats) do |account|
recount_account_stats(account)
end
when 'statuses'
processed, = parallelize_with_progress(statuses_with_stats) do |status|
recount_status_stats(status)
end
else
say("Unknown type: #{type}", :red)
exit(1)
end
say
say("OK, recounted #{processed} records", :green)
end
private
def accounts_with_stats
Account.local.includes(:account_stat)
end
def statuses_with_stats
Status.includes(:status_stat)
end
def recount_account_stats(account)
account.account_stat.tap do |account_stat|
account_stat.following_count = account.active_relationships.count
account_stat.followers_count = account.passive_relationships.count
account_stat.statuses_count = account.statuses.where.not(visibility: :direct).count
account_stat.save if account_stat.changed?
end
end
def recount_status_stats(status)
status.status_stat.tap do |status_stat|
status_stat.replies_count = status.replies.where.not(visibility: :direct).count
status_stat.reblogs_count = status.reblogs.count
status_stat.favourites_count = status.favourites.count
status_stat.save if status_stat.changed?
end
end
end
end
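The refactored class is still exposed as the `cache` subcommand of the main CLI (see the `subcommand 'cache', Cache` registration further down), so `tootctl cache recount accounts` keeps working; it can also be driven directly from Ruby, e.g. in a console:

Mastodon::CLI::Cache.start(%w(recount accounts))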

View file

@ -1,18 +1,10 @@
# frozen_string_literal: true
require 'concurrent'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class CanonicalEmailBlocksCLI < Thor
include CLIHelper
def self.exit_on_failure?
true
end
require_relative 'base'
module Mastodon::CLI
class CanonicalEmailBlocks < Base
desc 'find EMAIL', 'Find a given e-mail address in the canonical e-mail blocks'
long_desc <<-LONG_DESC
When suspending a local user, a hash of a "canonical" version of their e-mail

View file

@ -1,18 +1,10 @@
# frozen_string_literal: true
require 'concurrent'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class DomainsCLI < Thor
include CLIHelper
def self.exit_on_failure?
true
end
require_relative 'base'
module Mastodon::CLI
class Domains < Base
option :concurrency, type: :numeric, default: 5, aliases: [:c]
option :verbose, type: :boolean, aliases: [:v]
option :dry_run, type: :boolean
@ -42,7 +34,6 @@ module Mastodon
When the --purge-domain-blocks option is given, also purge matching domain blocks.
LONG_DESC
def purge(*domains)
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
domains = domains.map { |domain| TagManager.instance.normalize_domain(domain) }
account_scope = Account.none
domain_block_scope = DomainBlock.none
@ -87,23 +78,23 @@ module Mastodon
# Actually perform the deletions
processed, = parallelize_with_progress(account_scope) do |account|
DeleteAccountService.new.call(account, reserve_username: false, skip_side_effects: true) unless options[:dry_run]
DeleteAccountService.new.call(account, reserve_username: false, skip_side_effects: true) unless dry_run?
end
say("Removed #{processed} accounts#{dry_run}", :green)
say("Removed #{processed} accounts#{dry_run_mode_suffix}", :green)
if options[:purge_domain_blocks]
domain_block_count = domain_block_scope.count
domain_block_scope.in_batches.destroy_all unless options[:dry_run]
say("Removed #{domain_block_count} domain blocks#{dry_run}", :green)
domain_block_scope.in_batches.destroy_all unless dry_run?
say("Removed #{domain_block_count} domain blocks#{dry_run_mode_suffix}", :green)
end
custom_emojis_count = emoji_scope.count
emoji_scope.in_batches.destroy_all unless options[:dry_run]
emoji_scope.in_batches.destroy_all unless dry_run?
Instance.refresh unless options[:dry_run]
Instance.refresh unless dry_run?
say("Removed #{custom_emojis_count} custom emojis#{dry_run}", :green)
say("Removed #{custom_emojis_count} custom emojis#{dry_run_mode_suffix}", :green)
end
option :concurrency, type: :numeric, default: 50, aliases: [:c]
@ -139,7 +130,7 @@ module Mastodon
pool = Concurrent::ThreadPoolExecutor.new(min_threads: 0, max_threads: options[:concurrency], idletime: 10, auto_terminate: true, max_queue: 0)
work_unit = ->(domain) do
work_unit = lambda do |domain|
next if stats.key?(domain)
next if options[:exclude_suspended] && domain.match?(blocked_domains)
@ -148,6 +139,7 @@ module Mastodon
begin
Request.new(:get, "https://#{domain}/api/v1/instance").perform do |res|
next unless res.code == 200
stats[domain] = Oj.load(res.to_s)
end
@ -161,9 +153,10 @@ module Mastodon
Request.new(:get, "https://#{domain}/api/v1/instance/activity").perform do |res|
next unless res.code == 200
stats[domain]['activity'] = Oj.load(res.to_s)
end
rescue StandardError
rescue
failed.increment
ensure
processed.increment

View file

@ -1,18 +1,10 @@
# frozen_string_literal: true
require 'concurrent'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class EmailDomainBlocksCLI < Thor
include CLIHelper
def self.exit_on_failure?
true
end
require_relative 'base'
module Mastodon::CLI
class EmailDomainBlocks < Base
desc 'list', 'List blocked e-mail domains'
def list
EmailDomainBlock.where(parent_id: nil).order(id: 'DESC').find_each do |entry|
@ -47,7 +39,7 @@ module Mastodon
processed = 0
domains.each do |domain|
if EmailDomainBlock.where(domain: domain).exists?
if EmailDomainBlock.exists?(domain: domain)
say("#{domain} is already blocked.", :yellow)
skipped += 1
next
@ -68,7 +60,7 @@ module Mastodon
(email_domain_block.other_domains || []).uniq.each do |hostname|
another_email_domain_block = EmailDomainBlock.new(domain: hostname, parent: email_domain_block)
if EmailDomainBlock.where(domain: hostname).exists?
if EmailDomainBlock.exists?(domain: hostname)
say("#{hostname} is already blocked.", :yellow)
skipped += 1
next
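The `exists?(domain: ...)` form used in both hunks above is the Rails-preferred shorthand for the old `where(...).exists?` chain and issues the same `SELECT 1 ... LIMIT 1` query:

EmailDomainBlock.where(domain: domain).exists? # before
EmailDomainBlock.exists?(domain: domain)       # after, equivalent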

View file

@ -1,16 +1,10 @@
# frozen_string_literal: true
require 'rubygems/package'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class EmojiCLI < Thor
def self.exit_on_failure?
true
end
require_relative 'base'
module Mastodon::CLI
class Emoji < Base
option :prefix
option :suffix
option :overwrite, type: :boolean
@ -49,7 +43,7 @@ module Mastodon
next if filename.start_with?('._')
shortcode = [options[:prefix], filename, options[:suffix]].compact.join
custom_emoji = CustomEmoji.local.find_by("LOWER(shortcode) = ?", shortcode.downcase)
custom_emoji = CustomEmoji.local.find_by('LOWER(shortcode) = ?', shortcode.downcase)
if custom_emoji && !options[:overwrite]
skipped += 1
@ -68,12 +62,11 @@ module Mastodon
failed += 1
say('Failure/Error: ', :red)
say(entry.full_name)
say(' ' + custom_emoji.errors[:image].join(', '), :red)
say(" #{custom_emoji.errors[:image].join(', ')}", :red)
end
end
end
puts
say("Imported #{imported}, skipped #{skipped}, failed to import #{failed}", color(imported, skipped, failed))
end

View file

@ -1,18 +1,11 @@
# frozen_string_literal: true
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
require_relative 'base'
module Mastodon
class FeedsCLI < Thor
include CLIHelper
module Mastodon::CLI
class Feeds < Base
include Redisable
def self.exit_on_failure?
true
end
option :all, type: :boolean, default: false
option :concurrency, type: :numeric, default: 5, aliases: [:c]
option :verbose, type: :boolean, aliases: [:v]
@ -25,14 +18,12 @@ module Mastodon
Otherwise, a single user specified by USERNAME.
LONG_DESC
def build(username = nil)
dry_run = options[:dry_run] ? '(DRY RUN)' : ''
if options[:all] || username.nil?
processed, = parallelize_with_progress(Account.joins(:user).merge(User.active)) do |account|
PrecomputeFeedService.new.call(account) unless options[:dry_run]
processed, = parallelize_with_progress(active_user_accounts) do |account|
PrecomputeFeedService.new.call(account) unless dry_run?
end
say("Regenerated feeds for #{processed} accounts #{dry_run}", :green, true)
say("Regenerated feeds for #{processed} accounts #{dry_run_mode_suffix}", :green, true)
elsif username.present?
account = Account.find_local(username)
@ -41,9 +32,9 @@ module Mastodon
exit(1)
end
PrecomputeFeedService.new.call(account) unless options[:dry_run]
PrecomputeFeedService.new.call(account) unless dry_run?
say("OK #{dry_run}", :green, true)
say("OK #{dry_run_mode_suffix}", :green, true)
else
say('No account(s) given', :red)
exit(1)
@ -53,12 +44,14 @@ module Mastodon
desc 'clear', 'Remove all home and list feeds from Redis'
def clear
keys = redis.keys('feed:*')
redis.pipelined do
keys.each { |key| redis.del(key) }
end
redis.del(keys)
say('OK', :green)
end
private
def active_user_accounts
Account.joins(:user).merge(User.active)
end
end
end

View file

@ -1,17 +1,11 @@
# frozen_string_literal: true
require 'rubygems/package'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
require_relative 'base'
module Mastodon
class IpBlocksCLI < Thor
def self.exit_on_failure?
true
end
option :severity, required: true, enum: %w(no_access sign_up_requires_approval), desc: 'Severity of the block'
module Mastodon::CLI
class IpBlocks < Base
option :severity, required: true, enum: %w(no_access sign_up_requires_approval sign_up_block), desc: 'Severity of the block'
option :comment, aliases: [:c], desc: 'Optional comment'
option :duration, aliases: [:d], type: :numeric, desc: 'Duration of the block in seconds'
option :force, type: :boolean, aliases: [:f], desc: 'Overwrite existing blocks'
@ -36,6 +30,12 @@ module Mastodon
failed = 0
addresses.each do |address|
unless valid_ip_address?(address)
say("#{address} is invalid", :red)
failed += 1
next
end
ip_block = IpBlock.find_by(ip: address)
if ip_block.present? && !options[:force]
@ -79,14 +79,18 @@ module Mastodon
skipped = 0
addresses.each do |address|
ip_blocks = begin
if options[:force]
IpBlock.where('ip >>= ?', address)
else
IpBlock.where('ip <<= ?', address)
end
unless valid_ip_address?(address)
say("#{address} is invalid", :yellow)
skipped += 1
next
end
ip_blocks = if options[:force]
IpBlock.where('ip >>= ?', address)
else
IpBlock.where('ip <<= ?', address)
end
if ip_blocks.empty?
say("#{address} is not yet blocked", :yellow)
skipped += 1
@ -110,9 +114,9 @@ module Mastodon
IpBlock.where(severity: :no_access).find_each do |ip_block|
case options[:format]
when 'nginx'
puts "deny #{ip_block.ip}/#{ip_block.ip.prefix};"
say "deny #{ip_block.ip}/#{ip_block.ip.prefix};"
else
puts "#{ip_block.ip}/#{ip_block.ip.prefix}"
say "#{ip_block.ip}/#{ip_block.ip.prefix}"
end
end
end
@ -128,5 +132,12 @@ module Mastodon
:red
end
end
def valid_ip_address?(ip_address)
IPAddr.new(ip_address)
true
rescue IPAddr::InvalidAddressError
false
end
end
end
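The new `valid_ip_address?` check relies on `IPAddr.new` raising `IPAddr::InvalidAddressError` for malformed input, for example:

IPAddr.new('192.0.2.0/24') # => #<IPAddr: IPv4:192.0.2.0/255.255.255.0>, accepted
IPAddr.new('not-an-ip')    # => raises IPAddr::InvalidAddressError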

View file

@ -1,73 +1,69 @@
# frozen_string_literal: true
require 'thor'
require_relative 'mastodon/media_cli'
require_relative 'mastodon/emoji_cli'
require_relative 'mastodon/accounts_cli'
require_relative 'mastodon/feeds_cli'
require_relative 'mastodon/search_cli'
require_relative 'mastodon/settings_cli'
require_relative 'mastodon/statuses_cli'
require_relative 'mastodon/domains_cli'
require_relative 'mastodon/preview_cards_cli'
require_relative 'mastodon/cache_cli'
require_relative 'mastodon/upgrade_cli'
require_relative 'mastodon/email_domain_blocks_cli'
require_relative 'mastodon/canonical_email_blocks_cli'
require_relative 'mastodon/ip_blocks_cli'
require_relative 'mastodon/maintenance_cli'
require_relative 'mastodon/version'
require_relative 'base'
module Mastodon
class CLI < Thor
def self.exit_on_failure?
true
end
require_relative 'accounts'
require_relative 'cache'
require_relative 'canonical_email_blocks'
require_relative 'domains'
require_relative 'email_domain_blocks'
require_relative 'emoji'
require_relative 'feeds'
require_relative 'ip_blocks'
require_relative 'maintenance'
require_relative 'media'
require_relative 'preview_cards'
require_relative 'search'
require_relative 'settings'
require_relative 'statuses'
require_relative 'upgrade'
module Mastodon::CLI
class Main < Base
desc 'media SUBCOMMAND ...ARGS', 'Manage media files'
subcommand 'media', Mastodon::MediaCLI
subcommand 'media', Media
desc 'emoji SUBCOMMAND ...ARGS', 'Manage custom emoji'
subcommand 'emoji', Mastodon::EmojiCLI
subcommand 'emoji', Emoji
desc 'accounts SUBCOMMAND ...ARGS', 'Manage accounts'
subcommand 'accounts', Mastodon::AccountsCLI
subcommand 'accounts', Accounts
desc 'feeds SUBCOMMAND ...ARGS', 'Manage feeds'
subcommand 'feeds', Mastodon::FeedsCLI
subcommand 'feeds', Feeds
desc 'search SUBCOMMAND ...ARGS', 'Manage the search engine'
subcommand 'search', Mastodon::SearchCLI
subcommand 'search', Search
desc 'settings SUBCOMMAND ...ARGS', 'Manage dynamic settings'
subcommand 'settings', Mastodon::SettingsCLI
subcommand 'settings', Settings
desc 'statuses SUBCOMMAND ...ARGS', 'Manage statuses'
subcommand 'statuses', Mastodon::StatusesCLI
subcommand 'statuses', Statuses
desc 'domains SUBCOMMAND ...ARGS', 'Manage account domains'
subcommand 'domains', Mastodon::DomainsCLI
subcommand 'domains', Domains
desc 'preview_cards SUBCOMMAND ...ARGS', 'Manage preview cards'
subcommand 'preview_cards', Mastodon::PreviewCardsCLI
subcommand 'preview_cards', PreviewCards
desc 'cache SUBCOMMAND ...ARGS', 'Manage cache'
subcommand 'cache', Mastodon::CacheCLI
subcommand 'cache', Cache
desc 'upgrade SUBCOMMAND ...ARGS', 'Various version upgrade utilities'
subcommand 'upgrade', Mastodon::UpgradeCLI
subcommand 'upgrade', Upgrade
desc 'email_domain_blocks SUBCOMMAND ...ARGS', 'Manage e-mail domain blocks'
subcommand 'email_domain_blocks', Mastodon::EmailDomainBlocksCLI
subcommand 'email_domain_blocks', EmailDomainBlocks
desc 'ip_blocks SUBCOMMAND ...ARGS', 'Manage IP blocks'
subcommand 'ip_blocks', Mastodon::IpBlocksCLI
subcommand 'ip_blocks', IpBlocks
desc 'canonical_email_blocks SUBCOMMAND ...ARGS', 'Manage canonical e-mail blocks'
subcommand 'canonical_email_blocks', Mastodon::CanonicalEmailBlocksCLI
subcommand 'canonical_email_blocks', CanonicalEmailBlocks
desc 'maintenance SUBCOMMAND ...ARGS', 'Various maintenance utilities'
subcommand 'maintenance', Mastodon::MaintenanceCLI
subcommand 'maintenance', Maintenance
option :dry_run, type: :boolean
desc 'self-destruct', 'Erase the server from the federation'
@ -98,7 +94,7 @@ module Mastodon
exit(1) unless prompt.ask('Type in the domain of the server to confirm:', required: true) == Rails.configuration.x.local_domain
unless options[:dry_run]
unless dry_run?
prompt.warn('This operation WILL NOT be reversible. It can also take a long time.')
prompt.warn('While the data won\'t be erased locally, the server will be in a BROKEN STATE afterwards.')
prompt.warn('A running Sidekiq process is required. Do not shut it down until queues clear.')
@ -108,12 +104,11 @@ module Mastodon
inboxes = Account.inboxes
processed = 0
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
Setting.registrations_mode = 'none' unless options[:dry_run]
Setting.registrations_mode = 'none' unless dry_run?
if inboxes.empty?
Account.local.without_suspended.in_batches.update_all(suspended_at: Time.now.utc, suspension_origin: :local) unless options[:dry_run]
Account.local.without_suspended.in_batches.update_all(suspended_at: Time.now.utc, suspension_origin: :local) unless dry_run?
prompt.ok('It seems like your server has not federated with anything')
prompt.ok('You can shut it down and delete it any time')
return
@ -121,7 +116,7 @@ module Mastodon
prompt.warn('Do NOT interrupt this process...')
delete_account = ->(account) do
delete_account = lambda do |account|
payload = ActiveModelSerializers::SerializableResource.new(
account,
serializer: ActivityPub::DeleteActorSerializer,
@ -130,8 +125,8 @@ module Mastodon
json = Oj.dump(ActivityPub::LinkedDataSignature.new(payload).sign!(account))
unless options[:dry_run]
ActivityPub::DeliveryWorker.push_bulk(inboxes) do |inbox_url|
unless dry_run?
ActivityPub::DeliveryWorker.push_bulk(inboxes, limit: 1_000) do |inbox_url|
[json, account.id, inbox_url]
end
@ -144,7 +139,7 @@ module Mastodon
Account.local.without_suspended.find_each { |account| delete_account.call(account) }
Account.local.suspended.joins(:deletion_request).find_each { |account| delete_account.call(account) }
prompt.ok("Queued #{inboxes.size * processed} items into Sidekiq for #{processed} accounts#{dry_run}")
prompt.ok("Queued #{inboxes.size * processed} items into Sidekiq for #{processed} accounts#{dry_run_mode_suffix}")
prompt.ok('Wait until Sidekiq processes all items, then you can shut everything down and delete the data')
rescue TTY::Reader::InputInterrupt
exit(1)

View file

@ -1,20 +1,11 @@
# frozen_string_literal: true
require 'tty-prompt'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
require_relative 'base'
module Mastodon
class MaintenanceCLI < Thor
include CLIHelper
def self.exit_on_failure?
true
end
MIN_SUPPORTED_VERSION = 2019_10_01_213028 # rubocop:disable Style/NumericLiterals
MAX_SUPPORTED_VERSION = 2022_11_04_133904 # rubocop:disable Style/NumericLiterals
module Mastodon::CLI
class Maintenance < Base
MIN_SUPPORTED_VERSION = 2019_10_01_213028
MAX_SUPPORTED_VERSION = 2022_11_04_133904
# Stubs to enjoy ActiveRecord queries while not depending on a particular
# version of the code/database
@ -98,11 +89,9 @@ module Mastodon
owned_classes.each do |klass|
klass.where(account_id: other_account.id).find_each do |record|
begin
record.update_attribute(:account_id, id)
rescue ActiveRecord::RecordNotUnique
next
end
record.update_attribute(:account_id, id)
rescue ActiveRecord::RecordNotUnique
next
end
end
@ -111,11 +100,9 @@ module Mastodon
target_classes.each do |klass|
klass.where(target_account_id: other_account.id).find_each do |record|
begin
record.update_attribute(:target_account_id, id)
rescue ActiveRecord::RecordNotUnique
next
end
record.update_attribute(:target_account_id, id)
rescue ActiveRecord::RecordNotUnique
next
end
end
@ -146,25 +133,23 @@ module Mastodon
Mastodon has to be stopped to run this task, which will take a long time and may be destructive.
LONG_DESC
def fix_duplicates
@prompt = TTY::Prompt.new
if ActiveRecord::Migrator.current_version < MIN_SUPPORTED_VERSION
@prompt.error 'Your version of the database schema is too old and is not supported by this script.'
@prompt.error 'Please update to at least Mastodon 3.0.0 before running this script.'
say 'Your version of the database schema is too old and is not supported by this script.', :red
say 'Please update to at least Mastodon 3.0.0 before running this script.', :red
exit(1)
elsif ActiveRecord::Migrator.current_version > MAX_SUPPORTED_VERSION
@prompt.warn 'Your version of the database schema is more recent than this script, this may cause unexpected errors.'
exit(1) unless @prompt.yes?('Continue anyway? (Yes/No)')
say 'Your version of the database schema is more recent than this script, this may cause unexpected errors.', :yellow
exit(1) unless yes?('Continue anyway? (Yes/No)')
end
if Sidekiq::ProcessSet.new.any?
@prompt.error 'It seems Sidekiq is running. All Mastodon processes need to be stopped when using this script.'
say 'It seems Sidekiq is running. All Mastodon processes need to be stopped when using this script.', :red
exit(1)
end
@prompt.warn 'This task will take a long time to run and is potentially destructive.'
@prompt.warn 'Please make sure to stop Mastodon and have a backup.'
exit(1) unless @prompt.yes?('Continue? (Yes/No)')
say 'This task will take a long time to run and is potentially destructive.', :yellow
say 'Please make sure to stop Mastodon and have a backup.', :yellow
exit(1) unless yes?('Continue? (Yes/No)')
deduplicate_users!
deduplicate_account_domain_blocks!
@ -188,7 +173,7 @@ module Mastodon
Scenic.database.refresh_materialized_view('instances', concurrently: true, cascade: false) if ActiveRecord::Migrator.current_version >= 2020_12_06_004238
Rails.cache.clear
@prompt.say 'Finished!'
say 'Finished!'
end
private
@ -196,7 +181,7 @@ module Mastodon
def deduplicate_accounts!
remove_index_if_exists!(:accounts, 'index_accounts_on_username_and_domain_lower')
@prompt.say 'Deduplicating accounts… for local accounts, you will be asked to choose which account to keep unchanged.'
say 'Deduplicating accounts… for local accounts, you will be asked to choose which account to keep unchanged.'
find_duplicate_accounts.each do |row|
accounts = Account.where(id: row['ids'].split(',')).to_a
@ -208,14 +193,14 @@ module Mastodon
end
end
@prompt.say 'Restoring index_accounts_on_username_and_domain_lower…'
if ActiveRecord::Migrator.current_version < 20200620164023 # rubocop:disable Style/NumericLiterals
say 'Restoring index_accounts_on_username_and_domain_lower…'
if ActiveRecord::Migrator.current_version < 2020_06_20_164023
ActiveRecord::Base.connection.add_index :accounts, 'lower (username), lower(domain)', name: 'index_accounts_on_username_and_domain_lower', unique: true
else
ActiveRecord::Base.connection.add_index :accounts, "lower (username), COALESCE(lower(domain), '')", name: 'index_accounts_on_username_and_domain_lower', unique: true
end
@prompt.say 'Reindexing textual indexes on accounts…'
say 'Reindexing textual indexes on accounts…'
ActiveRecord::Base.connection.execute('REINDEX INDEX search_index;')
ActiveRecord::Base.connection.execute('REINDEX INDEX index_accounts_on_uri;')
ActiveRecord::Base.connection.execute('REINDEX INDEX index_accounts_on_url;')
@ -227,73 +212,82 @@ module Mastodon
remove_index_if_exists!(:users, 'index_users_on_remember_token')
remove_index_if_exists!(:users, 'index_users_on_reset_password_token')
@prompt.say 'Deduplicating user records…'
say 'Deduplicating user records…'
# Deduplicating email
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users GROUP BY email HAVING count(*) > 1").each do |row|
users = User.where(id: row['ids'].split(',')).sort_by(&:updated_at).reverse
ref_user = users.shift
@prompt.warn "Multiple users registered with e-mail address #{ref_user.email}."
@prompt.warn "e-mail will be disabled for the following accounts: #{user.map(&:account).map(&:acct).join(', ')}"
@prompt.warn 'Please reach out to them and set another address with `tootctl account modify` or delete them.'
say "Multiple users registered with e-mail address #{ref_user.email}.", :yellow
say "e-mail will be disabled for the following accounts: #{user.map(&:account).map(&:acct).join(', ')}", :yellow
say 'Please reach out to them and set another address with `tootctl account modify` or delete them.', :yellow
i = 0
users.each do |user|
user.update!(email: "#{i} " + user.email)
users.each_with_index do |user, index|
user.update!(email: "#{index} " + user.email)
end
end
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE confirmation_token IS NOT NULL GROUP BY confirmation_token HAVING count(*) > 1").each do |row|
users = User.where(id: row['ids'].split(',')).sort_by(&:created_at).reverse.drop(1)
@prompt.warn "Unsetting confirmation token for those accounts: #{users.map(&:account).map(&:acct).join(', ')}"
deduplicate_users_process_confirmation_token
deduplicate_users_process_remember_token
deduplicate_users_process_password_token
users.each do |user|
user.update!(confirmation_token: nil)
end
end
if ActiveRecord::Migrator.current_version < 20220118183010 # rubocop:disable Style/NumericLiterals
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE remember_token IS NOT NULL GROUP BY remember_token HAVING count(*) > 1").each do |row|
users = User.where(id: row['ids'].split(',')).sort_by(&:updated_at).reverse.drop(1)
@prompt.warn "Unsetting remember token for those accounts: #{users.map(&:account).map(&:acct).join(', ')}"
users.each do |user|
user.update!(remember_token: nil)
end
end
end
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE reset_password_token IS NOT NULL GROUP BY reset_password_token HAVING count(*) > 1").each do |row|
users = User.where(id: row['ids'].split(',')).sort_by(&:updated_at).reverse.drop(1)
@prompt.warn "Unsetting password reset token for those accounts: #{users.map(&:account).map(&:acct).join(', ')}"
users.each do |user|
user.update!(reset_password_token: nil)
end
end
@prompt.say 'Restoring users indexes…'
say 'Restoring users indexes…'
ActiveRecord::Base.connection.add_index :users, ['confirmation_token'], name: 'index_users_on_confirmation_token', unique: true
ActiveRecord::Base.connection.add_index :users, ['email'], name: 'index_users_on_email', unique: true
ActiveRecord::Base.connection.add_index :users, ['remember_token'], name: 'index_users_on_remember_token', unique: true if ActiveRecord::Migrator.current_version < 20220118183010
ActiveRecord::Base.connection.add_index :users, ['remember_token'], name: 'index_users_on_remember_token', unique: true if ActiveRecord::Migrator.current_version < 2022_01_18_183010
if ActiveRecord::Migrator.current_version < 20220310060641 # rubocop:disable Style/NumericLiterals
if ActiveRecord::Migrator.current_version < 2022_03_10_060641
ActiveRecord::Base.connection.add_index :users, ['reset_password_token'], name: 'index_users_on_reset_password_token', unique: true
else
ActiveRecord::Base.connection.add_index :users, ['reset_password_token'], name: 'index_users_on_reset_password_token', unique: true, where: 'reset_password_token IS NOT NULL', opclass: :text_pattern_ops
end
end
def deduplicate_users_process_confirmation_token
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE confirmation_token IS NOT NULL GROUP BY confirmation_token HAVING count(*) > 1").each do |row|
users = User.where(id: row['ids'].split(',')).sort_by(&:created_at).reverse.drop(1)
say "Unsetting confirmation token for those accounts: #{users.map(&:account).map(&:acct).join(', ')}", :yellow
users.each do |user|
user.update!(confirmation_token: nil)
end
end
end
def deduplicate_users_process_remember_token
if ActiveRecord::Migrator.current_version < 2022_01_18_183010
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE remember_token IS NOT NULL GROUP BY remember_token HAVING count(*) > 1").each do |row|
users = User.where(id: row['ids'].split(',')).sort_by(&:updated_at).reverse.drop(1)
say "Unsetting remember token for those accounts: #{users.map(&:account).map(&:acct).join(', ')}", :yellow
users.each do |user|
user.update!(remember_token: nil)
end
end
end
end
def deduplicate_users_process_password_token
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE reset_password_token IS NOT NULL GROUP BY reset_password_token HAVING count(*) > 1").each do |row|
users = User.where(id: row['ids'].split(',')).sort_by(&:updated_at).reverse.drop(1)
say "Unsetting password reset token for those accounts: #{users.map(&:account).map(&:acct).join(', ')}", :yellow
users.each do |user|
user.update!(reset_password_token: nil)
end
end
end
def deduplicate_account_domain_blocks!
remove_index_if_exists!(:account_domain_blocks, 'index_account_domain_blocks_on_account_id_and_domain')
@prompt.say 'Removing duplicate account domain blocks…'
say 'Removing duplicate account domain blocks…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM account_domain_blocks GROUP BY account_id, domain HAVING count(*) > 1").each do |row|
AccountDomainBlock.where(id: row['ids'].split(',').drop(1)).delete_all
end
@prompt.say 'Restoring account domain blocks indexes…'
ActiveRecord::Base.connection.add_index :account_domain_blocks, ['account_id', 'domain'], name: 'index_account_domain_blocks_on_account_id_and_domain', unique: true
say 'Restoring account domain blocks indexes…'
ActiveRecord::Base.connection.add_index :account_domain_blocks, %w(account_id domain), name: 'index_account_domain_blocks_on_account_id_and_domain', unique: true
end
def deduplicate_account_identity_proofs!
@ -301,13 +295,13 @@ module Mastodon
remove_index_if_exists!(:account_identity_proofs, 'index_account_proofs_on_account_and_provider_and_username')
@prompt.say 'Removing duplicate account identity proofs…'
say 'Removing duplicate account identity proofs…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM account_identity_proofs GROUP BY account_id, provider, provider_username HAVING count(*) > 1").each do |row|
AccountIdentityProof.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy)
end
@prompt.say 'Restoring account identity proofs indexes…'
ActiveRecord::Base.connection.add_index :account_identity_proofs, ['account_id', 'provider', 'provider_username'], name: 'index_account_proofs_on_account_and_provider_and_username', unique: true
say 'Restoring account identity proofs indexes…'
ActiveRecord::Base.connection.add_index :account_identity_proofs, %w(account_id provider provider_username), name: 'index_account_proofs_on_account_and_provider_and_username', unique: true
end
def deduplicate_announcement_reactions!
@ -315,19 +309,19 @@ module Mastodon
remove_index_if_exists!(:announcement_reactions, 'index_announcement_reactions_on_account_id_and_announcement_id')
@prompt.say 'Removing duplicate announcement reactions…'
say 'Removing duplicate announcement reactions…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM announcement_reactions GROUP BY account_id, announcement_id, name HAVING count(*) > 1").each do |row|
AnnouncementReaction.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy)
end
@prompt.say 'Restoring announcement_reactions indexes…'
ActiveRecord::Base.connection.add_index :announcement_reactions, ['account_id', 'announcement_id', 'name'], name: 'index_announcement_reactions_on_account_id_and_announcement_id', unique: true
say 'Restoring announcement_reactions indexes…'
ActiveRecord::Base.connection.add_index :announcement_reactions, %w(account_id announcement_id name), name: 'index_announcement_reactions_on_account_id_and_announcement_id', unique: true
end
def deduplicate_conversations!
remove_index_if_exists!(:conversations, 'index_conversations_on_uri')
@prompt.say 'Deduplicating conversations…'
say 'Deduplicating conversations…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM conversations WHERE uri IS NOT NULL GROUP BY uri HAVING count(*) > 1").each do |row|
conversations = Conversation.where(id: row['ids'].split(',')).sort_by(&:id).reverse
@ -339,8 +333,8 @@ module Mastodon
end
end
@prompt.say 'Restoring conversations indexes…'
if ActiveRecord::Migrator.current_version < 20220307083603 # rubocop:disable Style/NumericLiterals
say 'Restoring conversations indexes…'
if ActiveRecord::Migrator.current_version < 2022_03_07_083603
ActiveRecord::Base.connection.add_index :conversations, ['uri'], name: 'index_conversations_on_uri', unique: true
else
ActiveRecord::Base.connection.add_index :conversations, ['uri'], name: 'index_conversations_on_uri', unique: true, where: 'uri IS NOT NULL', opclass: :text_pattern_ops
@ -350,7 +344,7 @@ module Mastodon
def deduplicate_custom_emojis!
remove_index_if_exists!(:custom_emojis, 'index_custom_emojis_on_shortcode_and_domain')
@prompt.say 'Deduplicating custom_emojis…'
say 'Deduplicating custom_emojis…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM custom_emojis GROUP BY shortcode, domain HAVING count(*) > 1").each do |row|
emojis = CustomEmoji.where(id: row['ids'].split(',')).sort_by(&:id).reverse
@ -362,14 +356,14 @@ module Mastodon
end
end
@prompt.say 'Restoring custom_emojis indexes…'
ActiveRecord::Base.connection.add_index :custom_emojis, ['shortcode', 'domain'], name: 'index_custom_emojis_on_shortcode_and_domain', unique: true
say 'Restoring custom_emojis indexes…'
ActiveRecord::Base.connection.add_index :custom_emojis, %w(shortcode domain), name: 'index_custom_emojis_on_shortcode_and_domain', unique: true
end
def deduplicate_custom_emoji_categories!
remove_index_if_exists!(:custom_emoji_categories, 'index_custom_emoji_categories_on_name')
@prompt.say 'Deduplicating custom_emoji_categories…'
say 'Deduplicating custom_emoji_categories…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM custom_emoji_categories GROUP BY name HAVING count(*) > 1").each do |row|
categories = CustomEmojiCategory.where(id: row['ids'].split(',')).sort_by(&:id).reverse
@ -381,26 +375,26 @@ module Mastodon
end
end
@prompt.say 'Restoring custom_emoji_categories indexes…'
say 'Restoring custom_emoji_categories indexes…'
ActiveRecord::Base.connection.add_index :custom_emoji_categories, ['name'], name: 'index_custom_emoji_categories_on_name', unique: true
end
def deduplicate_domain_allows!
remove_index_if_exists!(:domain_allows, 'index_domain_allows_on_domain')
@prompt.say 'Deduplicating domain_allows…'
say 'Deduplicating domain_allows…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM domain_allows GROUP BY domain HAVING count(*) > 1").each do |row|
DomainAllow.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy)
end
@prompt.say 'Restoring domain_allows indexes…'
say 'Restoring domain_allows indexes…'
ActiveRecord::Base.connection.add_index :domain_allows, ['domain'], name: 'index_domain_allows_on_domain', unique: true
end
def deduplicate_domain_blocks!
remove_index_if_exists!(:domain_blocks, 'index_domain_blocks_on_domain')
@prompt.say 'Deduplicating domain_blocks…'
say 'Deduplicating domain_blocks…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM domain_blocks GROUP BY domain HAVING count(*) > 1").each do |row|
domain_blocks = DomainBlock.where(id: row['ids'].split(',')).by_severity.reverse.to_a
@ -417,7 +411,7 @@ module Mastodon
domain_blocks.each(&:destroy)
end
@prompt.say 'Restoring domain_blocks indexes…'
say 'Restoring domain_blocks indexes…'
ActiveRecord::Base.connection.add_index :domain_blocks, ['domain'], name: 'index_domain_blocks_on_domain', unique: true
end
@ -426,38 +420,38 @@ module Mastodon
remove_index_if_exists!(:unavailable_domains, 'index_unavailable_domains_on_domain')
@prompt.say 'Deduplicating unavailable_domains…'
say 'Deduplicating unavailable_domains…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM unavailable_domains GROUP BY domain HAVING count(*) > 1").each do |row|
UnavailableDomain.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy)
end
@prompt.say 'Restoring unavailable_domains indexes…'
say 'Restoring unavailable_domains indexes…'
ActiveRecord::Base.connection.add_index :unavailable_domains, ['domain'], name: 'index_unavailable_domains_on_domain', unique: true
end
def deduplicate_email_domain_blocks!
remove_index_if_exists!(:email_domain_blocks, 'index_email_domain_blocks_on_domain')
@prompt.say 'Deduplicating email_domain_blocks…'
say 'Deduplicating email_domain_blocks…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM email_domain_blocks GROUP BY domain HAVING count(*) > 1").each do |row|
domain_blocks = EmailDomainBlock.where(id: row['ids'].split(',')).sort_by { |b| b.parent.nil? ? 1 : 0 }.to_a
domain_blocks.drop(1).each(&:destroy)
end
@prompt.say 'Restoring email_domain_blocks indexes…'
say 'Restoring email_domain_blocks indexes…'
ActiveRecord::Base.connection.add_index :email_domain_blocks, ['domain'], name: 'index_email_domain_blocks_on_domain', unique: true
end
def deduplicate_media_attachments!
remove_index_if_exists!(:media_attachments, 'index_media_attachments_on_shortcode')
@prompt.say 'Deduplicating media_attachments…'
say 'Deduplicating media_attachments…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM media_attachments WHERE shortcode IS NOT NULL GROUP BY shortcode HAVING count(*) > 1").each do |row|
MediaAttachment.where(id: row['ids'].split(',').drop(1)).update_all(shortcode: nil)
end
@prompt.say 'Restoring media_attachments indexes…'
if ActiveRecord::Migrator.current_version < 20220310060626 # rubocop:disable Style/NumericLiterals
say 'Restoring media_attachments indexes…'
if ActiveRecord::Migrator.current_version < 2022_03_10_060626
ActiveRecord::Base.connection.add_index :media_attachments, ['shortcode'], name: 'index_media_attachments_on_shortcode', unique: true
else
ActiveRecord::Base.connection.add_index :media_attachments, ['shortcode'], name: 'index_media_attachments_on_shortcode', unique: true, where: 'shortcode IS NOT NULL', opclass: :text_pattern_ops
@ -467,19 +461,19 @@ module Mastodon
def deduplicate_preview_cards!
remove_index_if_exists!(:preview_cards, 'index_preview_cards_on_url')
@prompt.say 'Deduplicating preview_cards…'
say 'Deduplicating preview_cards…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM preview_cards GROUP BY url HAVING count(*) > 1").each do |row|
PreviewCard.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy)
end
@prompt.say 'Restoring preview_cards indexes…'
say 'Restoring preview_cards indexes…'
ActiveRecord::Base.connection.add_index :preview_cards, ['url'], name: 'index_preview_cards_on_url', unique: true
end
def deduplicate_statuses!
remove_index_if_exists!(:statuses, 'index_statuses_on_uri')
@prompt.say 'Deduplicating statuses…'
say 'Deduplicating statuses…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM statuses WHERE uri IS NOT NULL GROUP BY uri HAVING count(*) > 1").each do |row|
statuses = Status.where(id: row['ids'].split(',')).sort_by(&:id)
ref_status = statuses.shift
@ -489,8 +483,8 @@ module Mastodon
end
end
@prompt.say 'Restoring statuses indexes…'
if ActiveRecord::Migrator.current_version < 20220310060706 # rubocop:disable Style/NumericLiterals
say 'Restoring statuses indexes…'
if ActiveRecord::Migrator.current_version < 2022_03_10_060706
ActiveRecord::Base.connection.add_index :statuses, ['uri'], name: 'index_statuses_on_uri', unique: true
else
ActiveRecord::Base.connection.add_index :statuses, ['uri'], name: 'index_statuses_on_uri', unique: true, where: 'uri IS NOT NULL', opclass: :text_pattern_ops
@ -501,7 +495,7 @@ module Mastodon
remove_index_if_exists!(:tags, 'index_tags_on_name_lower')
remove_index_if_exists!(:tags, 'index_tags_on_name_lower_btree')
@prompt.say 'Deduplicating tags…'
say 'Deduplicating tags…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM tags GROUP BY lower((name)::text) HAVING count(*) > 1").each do |row|
tags = Tag.where(id: row['ids'].split(',')).sort_by { |t| [t.usable?, t.trendable?, t.listable?].count(false) }
ref_tag = tags.shift
@ -511,8 +505,8 @@ module Mastodon
end
end
@prompt.say 'Restoring tags indexes…'
if ActiveRecord::Migrator.current_version < 20210421121431
say 'Restoring tags indexes…'
if ActiveRecord::Migrator.current_version < 2021_04_21_121431
ActiveRecord::Base.connection.add_index :tags, 'lower((name)::text)', name: 'index_tags_on_name_lower', unique: true
else
ActiveRecord::Base.connection.execute 'CREATE UNIQUE INDEX CONCURRENTLY index_tags_on_name_lower_btree ON tags (lower(name) text_pattern_ops)'
@ -524,12 +518,12 @@ module Mastodon
remove_index_if_exists!(:webauthn_credentials, 'index_webauthn_credentials_on_external_id')
@prompt.say 'Deduplicating webauthn_credentials…'
say 'Deduplicating webauthn_credentials…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM webauthn_credentials GROUP BY external_id HAVING count(*) > 1").each do |row|
WebauthnCredential.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy)
end
@prompt.say 'Restoring webauthn_credentials indexes…'
say 'Restoring webauthn_credentials indexes…'
ActiveRecord::Base.connection.add_index :webauthn_credentials, ['external_id'], name: 'index_webauthn_credentials_on_external_id', unique: true
end
@ -538,28 +532,37 @@ module Mastodon
remove_index_if_exists!(:webhooks, 'index_webhooks_on_url')
@prompt.say 'Deduplicating webhooks…'
say 'Deduplicating webhooks…'
ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM webhooks GROUP BY url HAVING count(*) > 1").each do |row|
Webhook.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy)
end
@prompt.say 'Restoring webhooks indexes…'
say 'Restoring webhooks indexes…'
ActiveRecord::Base.connection.add_index :webhooks, ['url'], name: 'index_webhooks_on_url', unique: true
end
def deduplicate_local_accounts!(accounts)
accounts = accounts.sort_by(&:id).reverse
@prompt.warn "Multiple local accounts were found for username '#{accounts.first.username}'."
@prompt.warn 'All those accounts are distinct accounts but only the most recently-created one is fully-functional.'
say "Multiple local accounts were found for username '#{accounts.first.username}'.", :yellow
say 'All those accounts are distinct accounts but only the most recently-created one is fully-functional.', :yellow
accounts.each_with_index do |account, idx|
@prompt.say '%2d. %s: created at: %s; updated at: %s; last logged in at: %s; statuses: %5d; last status at: %s' % [idx, account.username, account.created_at, account.updated_at, account.user&.last_sign_in_at&.to_s || 'N/A', account.account_stat&.statuses_count || 0, account.account_stat&.last_status_at || 'N/A']
say format(
'%<index>2d. %<username>s: created at: %<created_at>s; updated at: %<updated_at>s; last logged in at: %<last_log_in_at>s; statuses: %<status_count>5d; last status at: %<last_status_at>s',
index: idx,
username: account.username,
created_at: account.created_at,
updated_at: account.updated_at,
last_log_in_at: account.user&.last_sign_in_at&.to_s || 'N/A',
status_count: account.account_stat&.statuses_count || 0,
last_status_at: account.account_stat&.last_status_at || 'N/A'
)
end
@prompt.say 'Please choose the one to keep unchanged, other ones will be automatically renamed.'
say 'Please choose the one to keep unchanged, other ones will be automatically renamed.'
ref_id = @prompt.ask('Account to keep unchanged:') do |q|
ref_id = ask('Account to keep unchanged:') do |q|
q.required true
q.default 0
q.convert :int
@ -601,11 +604,9 @@ module Mastodon
owned_classes = [ConversationMute, AccountConversation]
owned_classes.each do |klass|
klass.where(conversation_id: duplicate_conv.id).find_each do |record|
begin
record.update_attribute(:account_id, main_conv.id)
rescue ActiveRecord::RecordNotUnique
next
end
record.update_attribute(:account_id, main_conv.id)
rescue ActiveRecord::RecordNotUnique
next
end
end
end
@ -629,47 +630,37 @@ module Mastodon
owned_classes << Bookmark if ActiveRecord::Base.connection.table_exists?(:bookmarks)
owned_classes.each do |klass|
klass.where(status_id: duplicate_status.id).find_each do |record|
begin
record.update_attribute(:status_id, main_status.id)
rescue ActiveRecord::RecordNotUnique
next
end
end
end
StatusPin.where(account_id: main_status.account_id, status_id: duplicate_status.id).find_each do |record|
begin
record.update_attribute(:status_id, main_status.id)
rescue ActiveRecord::RecordNotUnique
next
end
end
StatusPin.where(account_id: main_status.account_id, status_id: duplicate_status.id).find_each do |record|
record.update_attribute(:status_id, main_status.id)
rescue ActiveRecord::RecordNotUnique
next
end
Status.where(in_reply_to_id: duplicate_status.id).find_each do |record|
begin
record.update_attribute(:in_reply_to_id, main_status.id)
rescue ActiveRecord::RecordNotUnique
next
end
record.update_attribute(:in_reply_to_id, main_status.id)
rescue ActiveRecord::RecordNotUnique
next
end
Status.where(reblog_of_id: duplicate_status.id).find_each do |record|
begin
record.update_attribute(:reblog_of_id, main_status.id)
rescue ActiveRecord::RecordNotUnique
next
end
record.update_attribute(:reblog_of_id, main_status.id)
rescue ActiveRecord::RecordNotUnique
next
end
end
def merge_tags!(main_tag, duplicate_tag)
[FeaturedTag].each do |klass|
klass.where(tag_id: duplicate_tag.id).find_each do |record|
begin
record.update_attribute(:tag_id, main_tag.id)
rescue ActiveRecord::RecordNotUnique
next
end
record.update_attribute(:tag_id, main_tag.id)
rescue ActiveRecord::RecordNotUnique
next
end
end
end
@ -680,9 +671,7 @@ module Mastodon
def remove_index_if_exists!(table, name)
ActiveRecord::Base.connection.remove_index(table, name: name)
rescue ArgumentError
nil
rescue ActiveRecord::StatementInvalid
rescue ArgumentError, ActiveRecord::StatementInvalid
nil
end
end

View file

@ -1,17 +1,12 @@
# frozen_string_literal: true
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
require_relative 'base'
module Mastodon
class MediaCLI < Thor
module Mastodon::CLI
class Media < Base
include ActionView::Helpers::NumberHelper
include CLIHelper
def self.exit_on_failure?
true
end
VALID_PATH_SEGMENTS_SIZE = [7, 10].freeze
option :days, type: :numeric, default: 7, aliases: [:d]
option :prune_profiles, type: :boolean, default: false
@ -22,7 +17,7 @@ module Mastodon
desc 'remove', 'Remove remote media files, headers or avatars'
long_desc <<-DESC
Removes locally cached copies of media attachments (and optionally profile
headers and avatars) from other servers. By default, only media attachments
headers and avatars) from other servers. By default, only media attachments
are removed.
The --days option specifies how old media attachments have to be before
they are removed. In case of avatars and headers, it specifies how old
@ -35,18 +30,17 @@ module Mastodon
follow status. By default, only accounts that are not followed by or
following anyone locally are pruned.
DESC
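# Illustrative invocations (hedged — assumed shell usage, not part of this
# file), matching the options and flags documented above:
#
#   bin/tootctl media remove --days 14
#   bin/tootctl media remove --prune-profiles --include-follows
#   bin/tootctl media remove --remove-headers --days 30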
# rubocop:disable Metrics/PerceivedComplexity
def remove
if options[:prune_profiles] && options[:remove_headers]
say('--prune-profiles and --remove-headers should not be specified simultaneously', :red, true)
exit(1)
end
if options[:include_follows] && !(options[:prune_profiles] || options[:remove_headers])
say('--include-follows can only be used with --prune-profiles or --remove-headers', :red, true)
exit(1)
end
time_ago = options[:days].days.ago
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
time_ago = options[:days].days.ago
if options[:prune_profiles] || options[:remove_headers]
processed, aggregate = parallelize_with_progress(Account.remote.where({ last_webfingered_at: ..time_ago, updated_at: ..time_ago })) do |account|
@ -57,7 +51,7 @@ module Mastodon
size = (account.header_file_size || 0)
size += (account.avatar_file_size || 0) if options[:prune_profiles]
unless options[:dry_run]
unless dry_run?
account.header.destroy
account.avatar.destroy if options[:prune_profiles]
account.save!
@ -66,7 +60,7 @@ module Mastodon
size
end
say("Visited #{processed} accounts and removed profile media totaling #{number_to_human_size(aggregate)}#{dry_run}", :green, true)
say("Visited #{processed} accounts and removed profile media totaling #{number_to_human_size(aggregate)}#{dry_run_mode_suffix}", :green, true)
end
unless options[:prune_profiles] || options[:remove_headers]
@ -75,7 +69,7 @@ module Mastodon
size = (media_attachment.file_file_size || 0) + (media_attachment.thumbnail_file_size || 0)
unless options[:dry_run]
unless dry_run?
media_attachment.file.destroy
media_attachment.thumbnail.destroy
media_attachment.save
@ -84,7 +78,7 @@ module Mastodon
size
end
say("Removed #{processed} media attachments (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
say("Removed #{processed} media attachments (approx. #{number_to_human_size(aggregate)})#{dry_run_mode_suffix}", :green, true)
end
end
@ -103,7 +97,6 @@ module Mastodon
progress = create_progress_bar(nil)
reclaimed_bytes = 0
removed = 0
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
prefix = options[:prefix]
case Paperclip::Attachment.default_options[:storage]
@ -116,13 +109,11 @@ module Mastodon
loop do
objects = begin
begin
bucket.objects(start_after: last_key, prefix: prefix).limit(1000).map { |x| x }
rescue => e
progress.log(pastel.red("Error fetching list of files: #{e}"))
progress.log("If you want to continue from this point, add --start-after=#{last_key} to your command") if last_key
break
end
bucket.objects(start_after: last_key, prefix: prefix).limit(1000).map { |x| x }
rescue => e
progress.log(pastel.red("Error fetching list of files: #{e}"))
progress.log("If you want to continue from this point, add --start-after=#{last_key} to your command") if last_key
break
end
break if objects.empty?
@ -131,12 +122,12 @@ module Mastodon
record_map = preload_records_from_mixed_objects(objects)
objects.each do |object|
object.acl.put(acl: s3_permissions) if options[:fix_permissions] && !options[:dry_run]
object.acl.put(acl: s3_permissions) if options[:fix_permissions] && !dry_run?
path_segments = object.key.split('/')
path_segments.delete('cache')
unless [7, 10].include?(path_segments.size)
unless VALID_PATH_SEGMENTS_SIZE.include?(path_segments.size)
progress.log(pastel.yellow("Unrecognized file found: #{object.key}"))
next
end
@ -153,7 +144,7 @@ module Mastodon
next unless attachment.blank? || !attachment.variant?(file_name)
begin
object.delete unless options[:dry_run]
object.delete unless dry_run?
reclaimed_bytes += object.size
removed += 1
@ -167,6 +158,9 @@ module Mastodon
when :fog
say('The fog storage driver is not supported for this operation at this time', :red)
exit(1)
when :azure
say('The azure storage driver is not supported for this operation at this time', :red)
exit(1)
when :filesystem
require 'find'
@ -180,7 +174,7 @@ module Mastodon
path_segments = key.split(File::SEPARATOR)
path_segments.delete('cache')
unless [7, 10].include?(path_segments.size)
unless VALID_PATH_SEGMENTS_SIZE.include?(path_segments.size)
progress.log(pastel.yellow("Unrecognized file found: #{key}"))
next
end
@ -202,7 +196,7 @@ module Mastodon
begin
size = File.size(path)
unless options[:dry_run]
unless dry_run?
File.delete(path)
begin
FileUtils.rmdir(File.dirname(path), parents: true)
@ -224,9 +218,8 @@ module Mastodon
progress.total = progress.progress
progress.finish
say("Removed #{removed} orphans (approx. #{number_to_human_size(reclaimed_bytes)})#{dry_run}", :green, true)
say("Removed #{removed} orphans (approx. #{number_to_human_size(reclaimed_bytes)})#{dry_run_mode_suffix}", :green, true)
end
# rubocop:enable Metrics/PerceivedComplexity
option :account, type: :string
option :domain, type: :string
@ -255,8 +248,6 @@ module Mastodon
not be re-downloaded. To force re-download of every URL, use --force.
DESC
def refresh
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
if options[:status]
scope = MediaAttachment.where(status_id: options[:status])
elsif options[:account]
@ -277,15 +268,13 @@ module Mastodon
exit(1)
end
if options[:days].present?
scope = scope.where('media_attachments.id > ?', Mastodon::Snowflake.id_at(options[:days].days.ago, with_random: false))
end
scope = scope.where('media_attachments.id > ?', Mastodon::Snowflake.id_at(options[:days].days.ago, with_random: false)) if options[:days].present?
processed, aggregate = parallelize_with_progress(scope) do |media_attachment|
next if media_attachment.remote_url.blank? || (!options[:force] && media_attachment.file_file_name.present?)
next if DomainBlock.reject_media?(media_attachment.account.domain)
unless options[:dry_run]
unless dry_run?
media_attachment.reset_file!
media_attachment.reset_thumbnail!
media_attachment.save
@ -294,7 +283,7 @@ module Mastodon
media_attachment.file_file_size + (media_attachment.thumbnail_file_size || 0)
end
say("Downloaded #{processed} media attachments (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
say("Downloaded #{processed} media attachments (approx. #{number_to_human_size(aggregate)})#{dry_run_mode_suffix}", :green, true)
end
desc 'usage', 'Calculate disk space consumed by Mastodon'
@ -316,7 +305,7 @@ module Mastodon
path_segments = path.split('/')[2..]
path_segments.delete('cache')
unless [7, 10].include?(path_segments.size)
unless VALID_PATH_SEGMENTS_SIZE.include?(path_segments.size)
say('Not a media URL', :red)
exit(1)
end
@ -369,7 +358,7 @@ module Mastodon
segments = object.key.split('/')
segments.delete('cache')
next unless [7, 10].include?(segments.size)
next unless VALID_PATH_SEGMENTS_SIZE.include?(segments.size)
model_name = segments.first.classify
record_id = segments[2..-2].join.to_i

View file

@ -1,18 +1,11 @@
# frozen_string_literal: true
require 'tty-prompt'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
require_relative 'base'
module Mastodon
class PreviewCardsCLI < Thor
module Mastodon::CLI
class PreviewCards < Base
include ActionView::Helpers::NumberHelper
include CLIHelper
def self.exit_on_failure?
true
end
option :days, type: :numeric, default: 180
option :concurrency, type: :numeric, default: 5, aliases: [:c]
@ -34,7 +27,6 @@ module Mastodon
DESC
def remove
time_ago = options[:days].days.ago
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
link = options[:link] ? 'link-type ' : ''
scope = PreviewCard.cached
scope = scope.where(type: :link) if options[:link]
@ -45,7 +37,7 @@ module Mastodon
size = preview_card.image_file_size
unless options[:dry_run]
unless dry_run?
preview_card.image.destroy
preview_card.save
end
@ -53,7 +45,7 @@ module Mastodon
size
end
say("Removed #{processed} #{link}preview cards (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
say("Removed #{processed} #{link}preview cards (approx. #{number_to_human_size(aggregate)})#{dry_run_mode_suffix}", :green, true)
end
end
end

View file

@ -0,0 +1,87 @@
# frozen_string_literal: true
dev_null = Logger.new('/dev/null')
Rails.logger = dev_null
ActiveRecord::Base.logger = dev_null
ActiveJob::Base.logger = dev_null
HttpLog.configuration.logger = dev_null
Paperclip.options[:log] = false
Chewy.logger = dev_null
require 'ruby-progressbar/outputs/null'
module Mastodon::CLI
module ProgressHelper
PROGRESS_FORMAT = '%c/%u |%b%i| %e'
def create_progress_bar(total = nil)
ProgressBar.create(
{
total: total,
format: PROGRESS_FORMAT,
}.merge(progress_output_options)
)
end
def parallelize_with_progress(scope)
if options[:concurrency] < 1
say('Cannot run with this concurrency setting, must be at least 1', :red)
exit(1)
end
reset_connection_pools!
progress = create_progress_bar(scope.count)
pool = Concurrent::FixedThreadPool.new(options[:concurrency])
total = Concurrent::AtomicFixnum.new(0)
aggregate = Concurrent::AtomicFixnum.new(0)
scope.reorder(nil).find_in_batches do |items|
futures = []
items.each do |item|
futures << Concurrent::Future.execute(executor: pool) do
if !progress.total.nil? && progress.progress + 1 > progress.total
# The number of items has changed between start and now,
# since there is no good way to predict the final count from
# here, just change the progress bar to an indeterminate one
progress.total = nil
end
progress.log("Processing #{item.id}") if options[:verbose]
Chewy.strategy(:mastodon) do
result = ActiveRecord::Base.connection_pool.with_connection do
yield(item)
ensure
RedisConfiguration.pool.checkin if Thread.current[:redis]
Thread.current[:redis] = nil
end
aggregate.increment(result) if result.is_a?(Integer)
end
rescue => e
progress.log pastel.red("Error processing #{item.id}: #{e}")
ensure
progress.increment
end
end
total.increment(items.size)
futures.map(&:value)
end
progress.stop
[total.value, aggregate.value]
end
private
def progress_output_options
Rails.env.test? ? { output: ProgressBar::Outputs::Null } : {}
end
end
end
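# Illustrative usage sketch (an assumed caller, not part of this file): a CLI
# command that includes this helper yields each record and returns an Integer
# that feeds the aggregate counter, mirroring how the media commands use it.
#
#   processed, aggregate = parallelize_with_progress(Account.remote) do |account|
#     account.header_file_size || 0
#   end
#   say("Visited #{processed} accounts (#{aggregate} bytes)", :green, true)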

View file

@ -1,16 +1,13 @@
# frozen_string_literal: true
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class SearchCLI < Thor
include CLIHelper
require_relative 'base'
module Mastodon::CLI
class Search < Base
# Indices are sorted by amount of data to be expected in each, so that
# smaller indices can go online sooner
INDICES = [
InstancesIndex,
AccountsIndex,
TagsIndex,
StatusesIndex,
@ -18,9 +15,10 @@ module Mastodon
option :concurrency, type: :numeric, default: 5, aliases: [:c], desc: 'Workload will be split between this number of threads'
option :batch_size, type: :numeric, default: 100, aliases: [:b], desc: 'Number of records in each batch'
option :only, type: :array, enum: %w(accounts tags statuses), desc: 'Only process these indices'
option :only, type: :array, enum: %w(instances accounts tags statuses), desc: 'Only process these indices'
option :import, type: :boolean, default: true, desc: 'Import data from the database to the index'
option :clean, type: :boolean, default: true, desc: 'Remove outdated documents from the index'
option :reset_chewy, type: :boolean, default: false, desc: "Reset Chewy's internal index"
desc 'deploy', 'Create or upgrade Elasticsearch indices and populate them'
long_desc <<~LONG_DESC
If Elasticsearch is empty, this command will create the necessary indices
@ -33,28 +31,20 @@ module Mastodon
database will be imported into the indices, unless overridden with --no-import.
LONG_DESC
def deploy
if options[:concurrency] < 1
say('Cannot run with this concurrency setting, must be at least 1', :red)
exit(1)
end
verify_deploy_options!
if options[:batch_size] < 1
say('Cannot run with this batch_size setting, must be at least 1', :red)
exit(1)
end
indices = begin
if options[:only]
options[:only].map { |str| "#{str.camelize}Index".constantize }
else
INDICES
end
end
indices = if options[:only]
options[:only].map { |str| "#{str.camelize}Index".constantize }
else
INDICES
end
pool = Concurrent::FixedThreadPool.new(options[:concurrency], max_queue: options[:concurrency] * 10)
importers = indices.index_with { |index| "Importer::#{index.name}Importer".constantize.new(batch_size: options[:batch_size], executor: pool) }
progress = ProgressBar.create(total: nil, format: '%t%c/%u |%b%i| %e (%r docs/s)', autofinish: false)
Chewy::Stash::Specification.reset! if options[:reset_chewy]
# First, ensure all indices are created and have the correct
# structure, so that live data can already be written
indices.select { |index| index.specification.changed? }.each do |index|
@ -104,5 +94,26 @@ module Mastodon
say("Indexed #{added} records, de-indexed #{removed}", :green, true)
end
private
def verify_deploy_options!
verify_deploy_concurrency!
verify_deploy_batch_size!
end
def verify_deploy_concurrency!
return unless options[:concurrency] < 1
say('Cannot run with this concurrency setting, must be at least 1', :red)
exit(1)
end
def verify_deploy_batch_size!
return unless options[:batch_size] < 1
say('Cannot run with this batch_size setting, must be at least 1', :red)
exit(1)
end
end
end
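# Illustrative invocations (hedged — assumed shell usage and Thor's dasherized
# switch names, not part of this file), matching the options declared above;
# note that 'instances' is now a valid value for --only:
#
#   bin/tootctl search deploy --only instances accounts
#   bin/tootctl search deploy --concurrency 10
#   bin/tootctl search deploy --reset-chewy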

View file

@ -1,15 +1,9 @@
# frozen_string_literal: true
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class RegistrationsCLI < Thor
def self.exit_on_failure?
true
end
require_relative 'base'
module Mastodon::CLI
class Registrations < Base
desc 'open', 'Open registrations'
def open
Setting.registrations_mode = 'open'
@ -37,8 +31,8 @@ module Mastodon
end
end
class SettingsCLI < Thor
class Settings < Base
desc 'registrations SUBCOMMAND ...ARGS', 'Manage state of registrations'
subcommand 'registrations', RegistrationsCLI
subcommand 'registrations', Registrations
end
end
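# Illustrative invocation (hedged — assumes the top-level CLI mounts Settings
# under the usual `tootctl settings` subcommand):
#
#   bin/tootctl settings registrations open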

View file

@ -1,18 +1,11 @@
# frozen_string_literal: true
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
require_relative 'base'
module Mastodon
class StatusesCLI < Thor
include CLIHelper
module Mastodon::CLI
class Statuses < Base
include ActionView::Helpers::NumberHelper
def self.exit_on_failure?
true
end
option :days, type: :numeric, default: 90
option :batch_size, type: :numeric, default: 1_000, aliases: [:b], desc: 'Number of records in each batch'
option :continue, type: :boolean, default: false, desc: 'If remove is not completed, execute from the previous continuation'
@ -68,7 +61,7 @@ module Mastodon
# Skip accounts followed by local accounts
clean_followed_sql = 'AND NOT EXISTS (SELECT 1 FROM follows WHERE statuses.account_id = follows.target_account_id)' unless options[:clean_followed]
ActiveRecord::Base.connection.exec_insert(<<-SQL.squish, 'SQL', [[nil, max_id]])
ActiveRecord::Base.connection.exec_insert(<<-SQL.squish, 'SQL', [max_id])
INSERT INTO statuses_to_be_deleted (id)
SELECT statuses.id FROM statuses WHERE deleted_at IS NULL AND NOT local AND uri IS NOT NULL AND (id < $1)
AND NOT EXISTS (SELECT 1 FROM statuses AS statuses1 WHERE statuses.id = statuses1.in_reply_to_id)

View file

@ -1,17 +1,9 @@
# frozen_string_literal: true
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class UpgradeCLI < Thor
include CLIHelper
def self.exit_on_failure?
true
end
require_relative 'base'
module Mastodon::CLI
class Upgrade < Base
CURRENT_STORAGE_SCHEMA_VERSION = 1
option :dry_run, type: :boolean, default: false
@ -25,7 +17,6 @@ module Mastodon
LONG_DESC
def storage_schema
progress = create_progress_bar(nil)
dry_run = dry_run? ? ' (DRY RUN)' : ''
records = 0
klasses = [
@ -50,16 +41,16 @@ module Mastodon
styles << :original unless styles.include?(:original)
styles.each do |style|
success = begin
case Paperclip::Attachment.default_options[:storage]
when :s3
upgrade_storage_s3(progress, attachment, style)
when :fog
upgrade_storage_fog(progress, attachment, style)
when :filesystem
upgrade_storage_filesystem(progress, attachment, style)
end
end
success = case Paperclip::Attachment.default_options[:storage]
when :s3
upgrade_storage_s3(progress, attachment, style)
when :fog
upgrade_storage_fog(progress, attachment, style)
when :azure
upgrade_storage_azure(progress, attachment, style)
when :filesystem
upgrade_storage_filesystem(progress, attachment, style)
end
upgraded = true if style == :original && success
@ -79,7 +70,7 @@ module Mastodon
progress.total = progress.progress
progress.finish
say("Upgraded storage schema of #{records} records#{dry_run}", :green, true)
say("Upgraded storage schema of #{records} records#{dry_run_mode_suffix}", :green, true)
end
private
@ -116,6 +107,11 @@ module Mastodon
exit(1)
end
def upgrade_storage_azure(_progress, _attachment, _style)
say('The azure storage driver is not supported for this operation at this time', :red)
exit(1)
end
def upgrade_storage_filesystem(progress, attachment, style)
previous_storage_schema_version = attachment.storage_schema_version
previous_path = attachment.path(style)

View file

@ -1,87 +0,0 @@
# frozen_string_literal: true
dev_null = Logger.new('/dev/null')
Rails.logger = dev_null
ActiveRecord::Base.logger = dev_null
ActiveJob::Base.logger = dev_null
HttpLog.configuration.logger = dev_null
Paperclip.options[:log] = false
Chewy.logger = dev_null
module Mastodon
module CLIHelper
def dry_run?
options[:dry_run]
end
def create_progress_bar(total = nil)
ProgressBar.create(total: total, format: '%c/%u |%b%i| %e')
end
def reset_connection_pools!
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations[Rails.env].dup.tap { |config| config['pool'] = options[:concurrency] + 1 })
RedisConfiguration.establish_pool(options[:concurrency])
end
def parallelize_with_progress(scope)
if options[:concurrency] < 1
say('Cannot run with this concurrency setting, must be at least 1', :red)
exit(1)
end
reset_connection_pools!
progress = create_progress_bar(scope.count)
pool = Concurrent::FixedThreadPool.new(options[:concurrency])
total = Concurrent::AtomicFixnum.new(0)
aggregate = Concurrent::AtomicFixnum.new(0)
scope.reorder(nil).find_in_batches do |items|
futures = []
items.each do |item|
futures << Concurrent::Future.execute(executor: pool) do
begin
if !progress.total.nil? && progress.progress + 1 > progress.total
# The number of items has changed between start and now,
# since there is no good way to predict the final count from
# here, just change the progress bar to an indeterminate one
progress.total = nil
end
progress.log("Processing #{item.id}") if options[:verbose]
Chewy.strategy(:mastodon) do
result = ActiveRecord::Base.connection_pool.with_connection do
yield(item)
ensure
RedisConfiguration.pool.checkin if Thread.current[:redis]
Thread.current[:redis] = nil
end
aggregate.increment(result) if result.is_a?(Integer)
end
rescue => e
progress.log pastel.red("Error processing #{item.id}: #{e}")
ensure
progress.increment
end
end
end
total.increment(items.size)
futures.map(&:value)
end
progress.stop
[total.value, aggregate.value]
end
def pastel
@pastel ||= Pastel.new
end
end
end

View file

@ -195,7 +195,14 @@ module Mastodon
def supports_drop_index_concurrently?
version = select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i
version >= 90200
version >= 90_200
end
# Only available on Postgresql >= 11
def supports_add_column_with_default?
version = select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i
version >= 11_000
end
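# Hypothetical migration sketch (not part of this file; class, table and column
# names are invented) showing how add_column_with_default below is typically
# called — on PostgreSQL >= 11 it now short-circuits to a plain ADD COLUMN with
# a default instead of batched updates:
#
#   class AddLanguageToExamples < ActiveRecord::Migration[6.1]
#     include Mastodon::MigrationHelpers
#     disable_ddl_transaction!
#
#     def up
#       add_column_with_default :examples, :language, :string, default: 'en', allow_null: false
#     end
#   end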
# Adds a foreign key with only minimal locking on the tables involved.
@ -289,8 +296,6 @@ module Mastodon
# determines this method to be too complex while there's no way to make it
# less "complex" without introducing extra methods (which actually will
# make things _more_ complex).
#
# rubocop: disable Metrics/AbcSize
def update_column_in_batches(table_name, column, value)
if transaction_open?
raise 'update_column_in_batches can not be run inside a transaction, ' \
@ -416,6 +421,11 @@ module Mastodon
# This method can also take a block which is passed directly to the
# `update_column_in_batches` method.
def add_column_with_default(table, column, type, default:, limit: nil, allow_null: false, &block)
if supports_add_column_with_default?
add_column(table, column, type, default: default, limit: limit, null: allow_null)
return
end
if transaction_open?
raise 'add_column_with_default can not be run inside a transaction, ' \
'you can disable transactions by calling disable_ddl_transaction! ' \
@ -573,7 +583,7 @@ module Mastodon
o.conname as name,
o.confdeltype as on_delete
from pg_constraint o
left join pg_class f on f.oid = o.confrelid
left join pg_class f on f.oid = o.confrelid
left join pg_class c on c.oid = o.conrelid
left join pg_class m on m.oid = o.conrelid
where o.contype = 'f'

View file

@ -0,0 +1,55 @@
# frozen_string_literal: true
module Mastodon
module MigrationWarning
WARNING_SECONDS = 10
DEFAULT_WARNING = <<~WARNING_MESSAGE
WARNING: This migration may take a *long* time for large instances.
It will *not* lock tables for any significant time, but it may run
for a very long time. We will pause for #{WARNING_SECONDS} seconds to allow you to
interrupt this migration if you are not ready.
WARNING_MESSAGE
def migration_duration_warning(explanation = nil)
return unless valid_environment?
announce_warning(explanation)
announce_countdown
end
private
def announce_countdown
WARNING_SECONDS.downto(1) do |i|
say "Continuing in #{i} second#{i == 1 ? '' : 's'}...", true
sleep 1
end
end
def valid_environment?
$stdout.isatty && Rails.env.production?
end
def announce_warning(explanation)
announce_message prepare_message(explanation)
end
def announce_message(text)
say ''
text.each_line do |line|
say(line)
end
say ''
end
def prepare_message(explanation)
if explanation.blank?
DEFAULT_WARNING
else
DEFAULT_WARNING + "\n#{explanation}"
end
end
end
end
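# Illustrative sketch (assumed caller; the class name and explanation text are
# invented) of how a long-running migration opts into this warning:
#
#   class BackfillExampleColumn < ActiveRecord::Migration[6.1]
#     include Mastodon::MigrationWarning
#
#     def up
#       migration_duration_warning('This backfills a very large table.')
#       # ... actual data migration ...
#     end
#   end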

View file

@ -12,11 +12,11 @@ module PremailerWebpackStrategy
css = if url.start_with?('http')
HTTP.get(url).to_s
else
url = url[1..-1] if url.start_with?('/')
File.read(Rails.public_path.join(url))
url = url[1..] if url.start_with?('/')
Rails.public_path.join(url).read
end
css.gsub(/url\(\//, "url(#{asset_host}/")
css.gsub(%r{url\(/}, "url(#{asset_host}/")
end
module_function :load

View file

@ -1,17 +1,17 @@
# frozen_string_literal: true
def setup_redis_env_url(prefix = nil, defaults = true)
prefix = prefix.to_s.upcase + '_' unless prefix.nil?
prefix = "#{prefix.to_s.upcase}_" unless prefix.nil?
prefix = '' if prefix.nil?
return if ENV[prefix + 'REDIS_URL'].present?
return if ENV["#{prefix}REDIS_URL"].present?
password = ENV.fetch(prefix + 'REDIS_PASSWORD') { '' if defaults }
host = ENV.fetch(prefix + 'REDIS_HOST') { 'localhost' if defaults }
port = ENV.fetch(prefix + 'REDIS_PORT') { 6379 if defaults }
db = ENV.fetch(prefix + 'REDIS_DB') { 0 if defaults }
password = ENV.fetch("#{prefix}REDIS_PASSWORD") { '' if defaults }
host = ENV.fetch("#{prefix}REDIS_HOST") { 'localhost' if defaults }
port = ENV.fetch("#{prefix}REDIS_PORT") { 6379 if defaults }
db = ENV.fetch("#{prefix}REDIS_DB") { 0 if defaults }
ENV[prefix + 'REDIS_URL'] = begin
ENV["#{prefix}REDIS_URL"] = begin
if [password, host, port, db].all?(&:nil?)
ENV['REDIS_URL']
else
@ -27,7 +27,7 @@ setup_redis_env_url(:cache, false)
setup_redis_env_url(:sidekiq, false)
namespace = ENV.fetch('REDIS_NAMESPACE', nil)
cache_namespace = namespace ? namespace + '_cache' : 'cache'
cache_namespace = namespace ? "#{namespace}_cache" : 'cache'
sidekiq_namespace = namespace
REDIS_CACHE_PARAMS = {
@ -35,7 +35,7 @@ REDIS_CACHE_PARAMS = {
url: ENV['CACHE_REDIS_URL'],
expires_in: 10.minutes,
namespace: cache_namespace,
pool_size: Sidekiq.server? ? Sidekiq.options[:concurrency] : Integer(ENV['MAX_THREADS'] || 5),
pool_size: Sidekiq.server? ? Sidekiq[:concurrency] : Integer(ENV['MAX_THREADS'] || 5),
pool_timeout: 5,
connect_timeout: 5,
}.freeze
@ -46,6 +46,4 @@ REDIS_SIDEKIQ_PARAMS = {
namespace: sidekiq_namespace,
}.freeze
if Rails.env.test?
ENV['REDIS_NAMESPACE'] = "mastodon_test#{ENV['TEST_ENV_NUMBER']}"
end
ENV['REDIS_NAMESPACE'] = "mastodon_test#{ENV['TEST_ENV_NUMBER']}" if Rails.env.test?
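# Worked example (hedged — hypothetical values): with the prefixed lookups
# above, setup_redis_env_url(:cache, false) reads CACHE_REDIS_PASSWORD,
# CACHE_REDIS_HOST, CACHE_REDIS_PORT and CACHE_REDIS_DB, and only assigns
# CACHE_REDIS_URL when it is not already set; if none of the parts are given it
# falls back to REDIS_URL, otherwise a URL is composed from them (the composing
# branch sits outside this hunk).
#
#   CACHE_REDIS_HOST=redis-cache CACHE_REDIS_PORT=6380 bundle exec puma
#   # => ENV['CACHE_REDIS_URL'] is derived from those parts at boot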

View file

@ -64,46 +64,7 @@ module Mastodon::Snowflake
def define_timestamp_id
return if already_defined?
connection.execute(<<~SQL)
CREATE OR REPLACE FUNCTION timestamp_id(table_name text)
RETURNS bigint AS
$$
DECLARE
time_part bigint;
sequence_base bigint;
tail bigint;
BEGIN
time_part := (
-- Get the time in milliseconds
((date_part('epoch', now()) * 1000))::bigint
-- And shift it over two bytes
<< 16);
sequence_base := (
'x' ||
-- Take the first two bytes (four hex characters)
substr(
-- Of the MD5 hash of the data we documented
md5(table_name || '#{SecureRandom.hex(16)}' || time_part::text),
1, 4
)
-- And turn it into a bigint
)::bit(16)::bigint;
-- Finally, add our sequence number to our base, and chop
-- it to the last two bytes
tail := (
(sequence_base + nextval(table_name || '_id_seq'))
& 65535);
-- Return the time part and the sequence part. OR appears
-- faster here than addition, but they're equivalent:
-- time_part has no trailing two bytes, and tail is only
-- the last two bytes.
RETURN time_part | tail;
END
$$ LANGUAGE plpgsql VOLATILE;
SQL
connection.execute(sanitized_timestamp_id_sql)
end
def ensure_id_sequences_exist
@ -115,7 +76,7 @@ module Mastodon::Snowflake
# And only those that are using timestamp_id.
next unless (data = DEFAULT_REGEX.match(id_col.default_function))
seq_name = data[:seq_prefix] + '_id_seq'
seq_name = "#{data[:seq_prefix]}_id_seq"
# If we were on Postgres 9.5+, we could do CREATE SEQUENCE IF
# NOT EXISTS, but we can't depend on that. Instead, catch the
@ -146,13 +107,64 @@ module Mastodon::Snowflake
private
def already_defined?
connection.execute(<<~SQL).values.first.first
connection.execute(<<~SQL.squish).values.first.first
SELECT EXISTS(
SELECT * FROM pg_proc WHERE proname = 'timestamp_id'
);
SQL
end
def sanitized_timestamp_id_sql
ActiveRecord::Base.sanitize_sql_array(timestamp_id_sql_array)
end
def timestamp_id_sql_array
[timestamp_id_sql_string, { random_string: SecureRandom.hex(16) }]
end
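# Illustrative effect (hedged — the hex value is invented): sanitize_sql_array
# quotes the generated string and substitutes it for the :random_string
# placeholder in the SQL below, e.g.
#
#   ActiveRecord::Base.sanitize_sql_array(['md5(table_name || :random_string)', { random_string: 'ab12cd34' }])
#   # => "md5(table_name || 'ab12cd34')"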
def timestamp_id_sql_string
<<~SQL
CREATE OR REPLACE FUNCTION timestamp_id(table_name text)
RETURNS bigint AS
$$
DECLARE
time_part bigint;
sequence_base bigint;
tail bigint;
BEGIN
time_part := (
-- Get the time in milliseconds
((date_part('epoch', now()) * 1000))::bigint
-- And shift it over two bytes
<< 16);
sequence_base := (
'x' ||
-- Take the first two bytes (four hex characters)
substr(
-- Of the MD5 hash of the data we documented
md5(table_name || :random_string || time_part::text),
1, 4
)
-- And turn it into a bigint
)::bit(16)::bigint;
-- Finally, add our sequence number to our base, and chop
-- it to the last two bytes
tail := (
(sequence_base + nextval(table_name || '_id_seq'))
& 65535);
-- Return the time part and the sequence part. OR appears
-- faster here than addition, but they're equivalent:
-- time_part has no trailing two bytes, and tail is only
-- the last two bytes.
RETURN time_part | tail;
END
$$ LANGUAGE plpgsql VOLATILE;
SQL
end
def connection
ActiveRecord::Base.connection
end

View file

@ -9,7 +9,7 @@ module Mastodon
end
def minor
1
2
end
def patch
@ -17,11 +17,11 @@ module Mastodon
end
def flags
''
ENV.fetch('MASTODON_VERSION_FLAGS', '-beta2')
end
def suffix
'~bgme'
ENV.fetch('MASTODON_VERSION_SUFFIX', '')
end
def to_a

View file

@ -8,7 +8,7 @@ module Paperclip
# monkey-patch to avoid unlinking source file too early
# see https://github.com/kreeti/kt-paperclip/issues/64
def post_process_style(name, style) #:nodoc:
def post_process_style(name, style) # :nodoc:
raise "Style #{name} has no processors defined." if style.processors.blank?
intermediate_files = []
@ -16,16 +16,16 @@ module Paperclip
# if we're processing the original, close + unlink the source tempfile
intermediate_files << original if name == :original
@queued_for_write[name] = style.processors.
inject(original) do |file, processor|
@queued_for_write[name] = style.processors
.inject(original) do |file, processor|
file = Paperclip.processor(processor).make(file, style.processor_options, self)
intermediate_files << file unless file == original
file
end
unadapted_file = @queued_for_write[name]
@queued_for_write[name] = Paperclip.io_adapters.
for(@queued_for_write[name], @options[:adapter_options])
@queued_for_write[name] = Paperclip.io_adapters
.for(@queued_for_write[name], @options[:adapter_options])
unadapted_file.close if unadapted_file.respond_to?(:close)
@queued_for_write[name]
rescue Paperclip::Errors::NotIdentifiedByImageMagickError => e

View file

@ -79,8 +79,8 @@ module Paperclip
private
def w3c_contrast(color1, color2)
luminance1 = color1.to_xyz.y * 0.01 + 0.05
luminance2 = color2.to_xyz.y * 0.01 + 0.05
luminance1 = (color1.to_xyz.y * 0.01) + 0.05
luminance2 = (color2.to_xyz.y * 0.01) + 0.05
if luminance1 > luminance2
luminance1 / luminance2
@ -109,11 +109,11 @@ module Paperclip
case max
when r
h = (g - b) / d + (g < b ? 6.0 : 0)
h = ((g - b) / d) + (g < b ? 6.0 : 0)
when g
h = (b - r) / d + 2.0
h = ((b - r) / d) + 2.0
when b
h = (r - g) / d + 4.0
h = ((r - g) / d) + 4.0
end
h /= 6.0
@ -126,9 +126,9 @@ module Paperclip
t += 1 if t.negative?
t -= 1 if t > 1
return (p + (q - p) * 6 * t) if t < 1 / 6.0
return (p + ((q - p) * 6 * t)) if t < 1 / 6.0
return q if t < 1 / 2.0
return (p + (q - p) * (2 / 3.0 - t) * 6) if t < 2 / 3.0
return (p + ((q - p) * ((2 / 3.0) - t) * 6)) if t < 2 / 3.0
p
end
@ -147,11 +147,11 @@ module Paperclip
g = l.to_f
b = l.to_f # achromatic
else
q = l < 0.5 ? l * (s + 1) : l + s - l * s
p = 2 * l - q
r = hue_to_rgb(p, q, h + 1 / 3.0)
q = l < 0.5 ? l * (s + 1) : l + s - (l * s)
p = (2 * l) - q
r = hue_to_rgb(p, q, h + (1 / 3.0))
g = hue_to_rgb(p, q, h)
b = hue_to_rgb(p, q, h - 1 / 3.0)
b = hue_to_rgb(p, q, h - (1 / 3.0))
end
[(r * 255).round, (g * 255).round, (b * 255).round]
@ -161,21 +161,19 @@ module Paperclip
def lighten_or_darken(color, by)
hue, saturation, light = rgb_to_hsl(color.r, color.g, color.b)
light = begin
if light < 50
[100, light + by].min
else
[0, light - by].max
end
end
light = if light < 50
[100, light + by].min
else
[0, light - by].max
end
ColorDiff::Color::RGB.new(*hsl_to_rgb(hue, saturation, light))
end
def palette_from_histogram(result, quantity)
frequencies = result.scan(/([0-9]+)\:/).flatten.map(&:to_f)
frequencies = result.scan(/([0-9]+):/).flatten.map(&:to_f)
hex_values = result.scan(/\#([0-9A-Fa-f]{6,8})/).flatten
total_frequencies = frequencies.reduce(&:+).to_f
total_frequencies = frequencies.sum.to_f
frequencies.map.with_index { |f, i| [f / total_frequencies, hex_values[i]] }
.sort_by { |r| -r[0] }
@ -185,7 +183,7 @@ module Paperclip
end
def rgb_to_hex(rgb)
'#%02x%02x%02x' % [rgb.r, rgb.g, rgb.b]
format('#%02x%02x%02x', rgb.r, rgb.g, rgb.b)
end
end
end

View file

@ -57,7 +57,7 @@ class GifReader
end
# Skip lzw min code size
raise InvalidValue unless s.read(1).unpack('C')[0] >= 2
raise InvalidValue unless s.read(1).unpack1('C') >= 2
# Skip image data sub-blocks
skip_sub_blocks!(s)
@ -77,7 +77,7 @@ class GifReader
private
def skip_extension_block!(file)
if EXTENSION_LABELS.include?(file.read(1).unpack('C')[0])
if EXTENSION_LABELS.include?(file.read(1).unpack1('C'))
block_size, = file.read(1).unpack('C')
file.seek(block_size, IO::SEEK_CUR)
end
@ -109,7 +109,7 @@ module Paperclip
final_file = Paperclip::Transcoder.make(file, options, attachment)
if options[:style] == :original
attachment.instance.file_file_name = File.basename(attachment.instance.file_file_name, '.*') + '.mp4'
attachment.instance.file_file_name = "#{File.basename(attachment.instance.file_file_name, '.*')}.mp4"
attachment.instance.file_content_type = 'video/mp4'
attachment.instance.type = MediaAttachment.types[:gifv]
end

View file

@ -2,13 +2,15 @@
module Paperclip
module MediaTypeSpoofDetectorExtensions
MARCEL_MIME_TYPES = %w(audio/mpeg image/avif).freeze
def calculated_content_type
return @calculated_content_type if defined?(@calculated_content_type)
@calculated_content_type = type_from_file_command.chomp
# The `file` command fails to recognize some MP3 files as such
@calculated_content_type = type_from_marcel if @calculated_content_type == 'application/octet-stream' && type_from_marcel == 'audio/mpeg'
@calculated_content_type = type_from_marcel if @calculated_content_type == 'application/octet-stream' && type_from_marcel.in?(MARCEL_MIME_TYPES)
@calculated_content_type
end

View file

@ -7,7 +7,7 @@ module Paperclip
def make
return @file unless options[:format]
target_extension = '.' + options[:format]
target_extension = ".#{options[:format]}"
extension = File.extname(attachment.instance_read(:file_name))
return @file unless options[:style] == :original && target_extension && extension != target_extension

View file

@ -1,9 +1,11 @@
# frozen_string_literal: true
module Rails
module EngineExtensions
# Rewrite task loading code to filter digitalocean.rake task
def run_tasks_blocks(app)
Railtie.instance_method(:run_tasks_blocks).bind_call(self, app)
paths["lib/tasks"].existent.reject { |ext| ext.end_with?('digitalocean.rake') }.sort.each { |ext| load(ext) }
paths['lib/tasks'].existent.reject { |ext| ext.end_with?('digitalocean.rake') }.sort.each { |ext| load(ext) }
end
end
end

View file

@ -36,46 +36,42 @@ class Sanitize
node['class'] = class_list.join(' ')
end
TRANSLATE_TRANSFORMER = lambda do |env|
node = env[:node]
node.remove_attribute('translate') unless node['translate'] == 'no'
end
UNSUPPORTED_HREF_TRANSFORMER = lambda do |env|
return unless env[:node_name] == 'a'
current_node = env[:node]
scheme = begin
if current_node['href'] =~ Sanitize::REGEX_PROTOCOL
Regexp.last_match(1).downcase
else
:relative
end
end
scheme = if current_node['href'] =~ Sanitize::REGEX_PROTOCOL
Regexp.last_match(1).downcase
else
:relative
end
current_node.replace(Nokogiri::XML::Text.new(current_node.text, current_node.document)) unless LINK_PROTOCOLS.include?(scheme)
end
UNSUPPORTED_ELEMENTS_TRANSFORMER = lambda do |env|
return unless %w(h1 h2 h3 h4 h5 h6 blockquote pre ul ol li).include?(env[:node_name])
return unless %w(h1 h2 h3 h4 h5 h6).include?(env[:node_name])
current_node = env[:node]
case env[:node_name]
when 'li'
current_node.traverse do |node|
next unless %w(p ul ol li).include?(node.name)
node.add_next_sibling('<br>') if node.next_sibling
node.replace(node.children) unless node.text?
end
else
current_node.name = 'p'
end
current_node.name = 'strong'
current_node.wrap('<p></p>')
end
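# Illustrative effect (assumed input): with the transformer above,
# '<h1>Hello</h1>' is rewritten to '<p><strong>Hello</strong></p>', which then
# survives sanitization because p and strong are both in the element allowlist
# below.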
MASTODON_STRICT ||= freeze_config(
elements: %w(p br span a),
elements: %w(p br span a del pre blockquote code b strong u i em ul ol li),
attributes: {
'a' => %w(href rel class),
'span' => %w(class),
'a' => %w(href rel class translate),
'span' => %w(class translate),
'ol' => %w(start reversed),
'li' => %w(value),
},
add_attributes: {
@ -89,6 +85,7 @@ class Sanitize
transformers: [
CLASS_WHITELIST_TRANSFORMER,
TRANSLATE_TRANSFORMER,
UNSUPPORTED_ELEMENTS_TRANSFORMER,
UNSUPPORTED_HREF_TRANSFORMER,
]
@ -98,15 +95,15 @@ class Sanitize
elements: %w(audio embed iframe source video),
attributes: {
'audio' => %w(controls),
'embed' => %w(height src type width),
'audio' => %w(controls),
'embed' => %w(height src type width),
'iframe' => %w(allowfullscreen frameborder height scrolling src width),
'source' => %w(src type),
'video' => %w(controls height loop width),
'video' => %w(controls height loop width),
},
protocols: {
'embed' => { 'src' => HTTP_PROTOCOLS },
'embed' => { 'src' => HTTP_PROTOCOLS },
'iframe' => { 'src' => HTTP_PROTOCOLS },
'source' => { 'src' => HTTP_PROTOCOLS },
},

View file

@ -3,42 +3,42 @@
if Rails.env.development?
task :set_annotation_options do
Annotate.set_defaults(
'routes' => 'false',
'models' => 'true',
'position_in_routes' => 'before',
'position_in_class' => 'before',
'position_in_test' => 'before',
'position_in_fixture' => 'before',
'position_in_factory' => 'before',
'position_in_serializer' => 'before',
'show_foreign_keys' => 'false',
'show_indexes' => 'false',
'simple_indexes' => 'false',
'model_dir' => 'app/models',
'root_dir' => '',
'include_version' => 'false',
'require' => '',
'exclude_tests' => 'true',
'exclude_fixtures' => 'true',
'exclude_factories' => 'true',
'exclude_serializers' => 'true',
'exclude_scaffolds' => 'true',
'exclude_controllers' => 'true',
'exclude_helpers' => 'true',
'ignore_model_sub_dir' => 'false',
'ignore_columns' => nil,
'ignore_routes' => nil,
'ignore_unknown_models' => 'false',
'routes' => 'false',
'models' => 'true',
'position_in_routes' => 'before',
'position_in_class' => 'before',
'position_in_test' => 'before',
'position_in_fixture' => 'before',
'position_in_factory' => 'before',
'position_in_serializer' => 'before',
'show_foreign_keys' => 'false',
'show_indexes' => 'false',
'simple_indexes' => 'false',
'model_dir' => 'app/models',
'root_dir' => '',
'include_version' => 'false',
'require' => '',
'exclude_tests' => 'true',
'exclude_fixtures' => 'true',
'exclude_factories' => 'true',
'exclude_serializers' => 'true',
'exclude_scaffolds' => 'true',
'exclude_controllers' => 'true',
'exclude_helpers' => 'true',
'ignore_model_sub_dir' => 'false',
'ignore_columns' => nil,
'ignore_routes' => nil,
'ignore_unknown_models' => 'false',
'hide_limit_column_types' => 'integer,boolean',
'skip_on_db_migrate' => 'false',
'format_bare' => 'true',
'format_rdoc' => 'false',
'format_markdown' => 'false',
'sort' => 'false',
'force' => 'false',
'trace' => 'false',
'wrapper_open' => nil,
'wrapper_close' => nil
'skip_on_db_migrate' => 'false',
'format_bare' => 'true',
'format_rdoc' => 'false',
'format_markdown' => 'false',
'sort' => 'false',
'force' => 'false',
'trace' => 'false',
'wrapper_open' => nil,
'wrapper_close' => nil
)
end

View file

@ -1,3 +1,5 @@
# frozen_string_literal: true
namespace :branding do
desc 'Generate necessary graphic assets for branding from source SVG files'
task generate: :environment do

View file

@ -4,16 +4,14 @@ namespace :db do
namespace :migrate do
desc 'Setup the db or migrate depending on state of db'
task setup: :environment do
begin
if ActiveRecord::Migrator.current_version.zero?
Rake::Task['db:migrate'].invoke
Rake::Task['db:seed'].invoke
end
rescue ActiveRecord::NoDatabaseError
Rake::Task['db:setup'].invoke
else
if ActiveRecord::Migrator.current_version.zero?
Rake::Task['db:migrate'].invoke
Rake::Task['db:seed'].invoke
end
rescue ActiveRecord::NoDatabaseError
Rake::Task['db:setup'].invoke
else
Rake::Task['db:migrate'].invoke
end
end

View file

@ -6,7 +6,7 @@ def gen_border(codepoint, color)
doc = File.open(input) { |f| Nokogiri::XML(f) }
svg = doc.at_css('svg')
if svg.key?('viewBox')
view_box = svg['viewBox'].split(' ').map(&:to_i)
view_box = svg['viewBox'].split.map(&:to_i)
view_box[0] -= 2
view_box[1] -= 2
view_box[2] += 4
@ -31,12 +31,12 @@ def gen_border(codepoint, color)
end
def codepoints_to_filename(codepoints)
codepoints.downcase.gsub(/\A[0]+/, '').tr(' ', '-')
codepoints.downcase.gsub(/\A0+/, '').tr(' ', '-')
end
def codepoints_to_unicode(codepoints)
if codepoints.include?(' ')
codepoints.split(' ').map(&:hex).pack('U*')
codepoints.split.map(&:hex).pack('U*')
else
[codepoints.hex].pack('U')
end
@ -69,7 +69,7 @@ namespace :emojis do
end
end
existence_maps = grouped_codes.map { |c| c.index_with { |cc| File.exist?(Rails.public_path.join('emoji', "#{codepoints_to_filename(cc)}.svg")) } }
existence_maps = grouped_codes.map { |c| c.index_with { |cc| Rails.public_path.join('emoji', "#{codepoints_to_filename(cc)}.svg").exist? } }
map = {}
existence_maps.each do |group|

View file

@ -21,7 +21,7 @@ namespace :mastodon do
env['LOCAL_DOMAIN'] = prompt.ask('Domain name:') do |q|
q.required true
q.modify :strip
q.validate(/\A[a-z0-9\.\-]+\z/i)
q.validate(/\A[a-z0-9.-]+\z/i)
q.messages[:valid?] = 'Invalid domain. If you intend to use unicode characters, enter punycode here'
end
@ -92,7 +92,7 @@ namespace :mastodon do
prompt.ok 'Database configuration works! 🎆'
db_connection_works = true
break
rescue StandardError => e
rescue => e
prompt.error 'Database connection could not be established with this configuration, try again.'
prompt.error e.message
break unless prompt.yes?('Try again?')
@ -132,7 +132,7 @@ namespace :mastodon do
redis.ping
prompt.ok 'Redis configuration works! 🎆'
break
rescue StandardError => e
rescue => e
prompt.error 'Redis connection could not be established with this configuration, try again.'
prompt.error e.message
break unless prompt.yes?('Try again?')
@ -240,7 +240,7 @@ namespace :mastodon do
end
env['S3_PROTOCOL'] = env['S3_ENDPOINT'].start_with?('https') ? 'https' : 'http'
env['S3_HOSTNAME'] = env['S3_ENDPOINT'].gsub(/\Ahttps?:\/\//, '')
env['S3_HOSTNAME'] = env['S3_ENDPOINT'].gsub(%r{\Ahttps?://}, '')
env['S3_BUCKET'] = prompt.ask('Minio bucket name:') do |q|
q.required true
@ -264,12 +264,12 @@ namespace :mastodon do
env['S3_ENDPOINT'] = prompt.ask('Storj DCS endpoint URL:') do |q|
q.required true
q.default "https://gateway.storjshare.io"
q.default 'https://gateway.storjshare.io'
q.modify :strip
end
env['S3_PROTOCOL'] = env['S3_ENDPOINT'].start_with?('https') ? 'https' : 'http'
env['S3_HOSTNAME'] = env['S3_ENDPOINT'].gsub(/\Ahttps?:\/\//, '')
env['S3_HOSTNAME'] = env['S3_ENDPOINT'].gsub(%r{\Ahttps?://}, '')
env['S3_BUCKET'] = prompt.ask('Storj DCS bucket name:') do |q|
q.required true
@ -286,13 +286,13 @@ namespace :mastodon do
q.required true
q.modify :strip
end
linksharing_access_key = prompt.ask('Storj Linksharing access key (uplink share --register --public --readonly=true --disallow-lists --not-after=none sj://bucket):') do |q|
q.required true
q.modify :strip
end
env['S3_ALIAS_HOST'] = "link.storjshare.io/raw/#{linksharing_access_key}/#{env['S3_BUCKET']}"
when 'Google Cloud Storage'
env['S3_ENABLED'] = 'true'
env['S3_PROTOCOL'] = 'https'
@ -399,14 +399,14 @@ namespace :mastodon do
end
ActionMailer::Base.smtp_settings = {
port: env['SMTP_PORT'],
address: env['SMTP_SERVER'],
user_name: env['SMTP_LOGIN'].presence,
password: env['SMTP_PASSWORD'].presence,
domain: env['LOCAL_DOMAIN'],
authentication: env['SMTP_AUTH_METHOD'] == 'none' ? nil : env['SMTP_AUTH_METHOD'] || :plain,
openssl_verify_mode: env['SMTP_OPENSSL_VERIFY_MODE'],
enable_starttls: enable_starttls,
port: env['SMTP_PORT'],
address: env['SMTP_SERVER'],
user_name: env['SMTP_LOGIN'].presence,
password: env['SMTP_PASSWORD'].presence,
domain: env['LOCAL_DOMAIN'],
authentication: env['SMTP_AUTH_METHOD'] == 'none' ? nil : env['SMTP_AUTH_METHOD'] || :plain,
openssl_verify_mode: env['SMTP_OPENSSL_VERIFY_MODE'],
enable_starttls: enable_starttls,
enable_starttls_auto: enable_starttls_auto,
}
@ -417,7 +417,7 @@ namespace :mastodon do
mail = ActionMailer::Base.new.mail to: send_to, subject: 'Test', body: 'Mastodon SMTP configuration works!'
mail.deliver
break
rescue StandardError => e
rescue => e
prompt.error 'E-mail could not be sent with this configuration, try again.'
prompt.error e.message
break unless prompt.yes?('Try again?')
@ -438,14 +438,9 @@ namespace :mastodon do
"#{key}=#{escaped}"
end.join("\n")
generated_header = "# Generated with mastodon:setup on #{Time.now.utc}\n\n".dup
generated_header = generate_header(incompatible_syntax)
if incompatible_syntax
generated_header << "# Some variables in this file will be interpreted differently whether you are\n"
generated_header << "# using docker-compose or not.\n\n"
end
File.write(Rails.root.join('.env.production'), "#{generated_header}#{env_contents}\n")
Rails.root.join('.env.production').write("#{generated_header}#{env_contents}\n")
if using_docker
prompt.ok 'Below is your configuration, save it to an .env.production file outside Docker:'
@ -538,6 +533,19 @@ namespace :mastodon do
puts "VAPID_PUBLIC_KEY=#{vapid_key.public_key}"
end
end
private
def generate_header(include_warning)
default_message = "# Generated with mastodon:setup on #{Time.now.utc}\n\n"
default_message.tap do |string|
if include_warning
string << "# Some variables in this file will be interpreted differently whether you are\n"
string << "# using docker-compose or not.\n\n"
end
end
end
end
def disable_log_stdout!
@ -573,7 +581,7 @@ def dotenv_escape(value)
# As long as the value doesn't include single quotes, we can safely
# rely on single quotes
return "'#{value}'" unless /[']/.match?(value)
return "'#{value}'" unless value.include?("'")
# If the value contains the string '\n' or '\r' we simply can't use
# a double-quoted string, because Dotenv will expand \n or \r no

View file

@ -5,7 +5,7 @@ REPOSITORY_NAME = 'mastodon/mastodon'
namespace :repo do
desc 'Generate the AUTHORS.md file'
task :authors do
file = File.open(Rails.root.join('AUTHORS.md'), 'w')
file = Rails.root.join('AUTHORS.md').open('w')
file << <<~HEADER
Authors
@ -49,8 +49,8 @@ namespace :repo do
File.open(path, 'r') do |file|
file.each_line do |line|
if line.start_with?('-')
new_line = line.gsub(/#([[:digit:]]+)*/) do |pull_request_reference|
pull_request_number = pull_request_reference[1..-1]
new_line = line.gsub(/[(]#([[:digit:]]+)[)]\Z/) do |pull_request_reference|
pull_request_number = pull_request_reference[2..-2]
response = nil
loop do
@ -66,7 +66,7 @@ namespace :repo do
end
pull_request = Oj.load(response.to_s)
"[#{pull_request['user']['login']}](#{pull_request['html_url']})"
"([#{pull_request['user']['login']}](#{pull_request['html_url']}))"
end
tmp.puts new_line
@ -87,12 +87,12 @@ namespace :repo do
task check_locales_files: :environment do
pastel = Pastel.new
missing_yaml_files = I18n.available_locales.reject { |locale| File.exist?(Rails.root.join('config', 'locales', "#{locale}.yml")) }
missing_json_files = I18n.available_locales.reject { |locale| File.exist?(Rails.root.join('app', 'javascript', 'mastodon', 'locales', "#{locale}.json")) }
missing_yaml_files = I18n.available_locales.reject { |locale| Rails.root.join('config', 'locales', "#{locale}.yml").exist? }
missing_json_files = I18n.available_locales.reject { |locale| Rails.root.join('app', 'javascript', 'mastodon', 'locales', "#{locale}.json").exist? }
locales_in_files = Dir[Rails.root.join('config', 'locales', '*.yml')].map do |path|
file_name = File.basename(path)
file_name.gsub(/\A(doorkeeper|devise|activerecord|simple_form)\./, '').gsub(/\.yml\z/, '').to_sym
file_name = File.basename(path, '.yml')
file_name.gsub(/\A(doorkeeper|devise|activerecord|simple_form)\./, '').to_sym
end.uniq.compact
missing_available_locales = locales_in_files - I18n.available_locales

11
lib/tasks/spec.rake Normal file
View file

@ -0,0 +1,11 @@
# frozen_string_literal: true
if Rake::Task.task_defined?('spec:system')
namespace :spec do
task :enable_system_specs do # rubocop:disable Rails/RakeEnvironment
ENV['RUN_SYSTEM_SPECS'] = 'true'
end
end
Rake::Task['spec:system'].enhance ['spec:enable_system_specs']
end

View file

@ -7,13 +7,13 @@ namespace :mastodon do
task :stats do
require 'rails/code_statistics'
[
%w(App\ Libraries app/lib),
['App Libraries', 'app/lib'],
%w(Presenters app/presenters),
%w(Services app/services),
%w(Validators app/validators),
%w(Workers app/workers),
].each do |name, dir|
::STATS_DIRECTORIES << [name, Rails.root.join(dir)]
STATS_DIRECTORIES << [name, Rails.root.join(dir)]
end
end
end

View file

@ -25,7 +25,7 @@ namespace :tests do
end
if Account.where(domain: Rails.configuration.x.local_domain).exists?
puts 'Faux remote accounts not properly claned up'
puts 'Faux remote accounts not properly cleaned up'
exit(1)
end
@ -53,6 +53,21 @@ namespace :tests do
puts 'Admin::ActionLog email domain block records not updated as expected'
exit(1)
end
unless User.find(1).settings['notification_emails.favourite'] == true && User.find(1).settings['notification_emails.mention'] == false
puts 'User settings not kept as expected'
exit(1)
end
unless Account.find_remote('bob', 'ActivityPub.com').domain == 'activitypub.com'
puts 'Account domains not properly normalized'
exit(1)
end
unless Status.find(12).preview_cards.pluck(:url) == ['https://joinmastodon.org/']
puts 'Preview cards not deduplicated as expected'
exit(1)
end
end
desc 'Populate the database with test data for 2.4.3'
@ -98,12 +113,17 @@ namespace :tests do
(1, 'destroy', 'EmailDomainBlock', 1, now(), now()),
(1, 'destroy', 'Status', 1, now(), now()),
(1, 'destroy', 'CustomEmoji', 3, now(), now());
INSERT INTO "settings"
(id, thing_type, thing_id, var, value, created_at, updated_at)
VALUES
(3, 'User', 1, 'notification_emails', E'--- !ruby/hash:ActiveSupport::HashWithIndifferentAccess\nfollow: false\nreblog: true\nfavourite: true\nmention: false\nfollow_request: true\ndigest: true\nreport: true\npending_account: false\ntrending_tag: true\nappeal: true\n', now(), now());
SQL
end
desc 'Populate the database with test data for 2.4.0'
task populate_v2_4: :environment do # rubocop:disable Naming/VariableNumber
ActiveRecord::Base.connection.execute(<<~SQL)
ActiveRecord::Base.connection.execute(<<~SQL.squish)
INSERT INTO "settings"
(id, thing_type, thing_id, var, value, created_at, updated_at)
VALUES
@ -150,7 +170,7 @@ namespace :tests do
INSERT INTO "accounts"
(id, username, domain, private_key, public_key, created_at, updated_at, protocol, inbox_url, outbox_url, followers_url)
VALUES
(6, 'bob', 'activitypub.com', NULL, #{remote_public_key_ap}, now(), now(),
(6, 'bob', 'ActivityPub.com', NULL, #{remote_public_key_ap}, now(), now(),
1, 'https://activitypub.com/users/bob/inbox', 'https://activitypub.com/users/bob/outbox', 'https://activitypub.com/users/bob/followers');
INSERT INTO "accounts"
@ -223,6 +243,11 @@ namespace :tests do
(10, 2, '@admin hey!', NULL, 1, 3, now(), now()),
(11, 1, '@user hey!', 10, 1, 3, now(), now());
INSERT INTO "statuses"
(id, account_id, text, created_at, updated_at)
VALUES
(12, 1, 'check out https://joinmastodon.org/', now(), now());
-- mentions (from previous statuses)
INSERT INTO "mentions"
@ -311,6 +336,21 @@ namespace :tests do
(1, 6, 2, 'Follow', 2, now(), now()),
(2, 2, 1, 'Mention', 4, now(), now()),
(3, 1, 2, 'Mention', 5, now(), now());
-- preview cards
INSERT INTO "preview_cards"
(id, url, title, created_at, updated_at)
VALUES
(1, 'https://joinmastodon.org/', 'Mastodon - Decentralized social media', now(), now());
-- many-to-many association between preview cards and statuses
INSERT INTO "preview_cards_statuses"
(status_id, preview_card_id)
VALUES
(12, 1),
(12, 1);
SQL
end
end

View file

@ -1,8 +0,0 @@
# frozen_string_literal: true
class <%= migration_class_name %> < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def change
end
end

View file

@ -13,7 +13,7 @@ module Terrapin
def pipe_options
# Add some flags to explicitly close the other end of the pipes
{ out: @stdout_out, err: @stderr_out, @stdout_in => :close, @stderr_in => :close }
{ :out => @stdout_out, :err => @stderr_out, @stdout_in => :close, @stderr_in => :close }
end
def read

View file

@ -13,7 +13,14 @@ module Webpacker::HelperExtensions
def preload_pack_asset(name, **options)
src, integrity = current_webpacker_instance.manifest.lookup!(name, with_integrity: true)
preload_link_tag(src, options.merge(integrity: integrity))
# This attribute will only work if the assets are on a different domain.
# And Webpack will (correctly) only add it in this case, so we need to conditionally set it here
# otherwise the preloaded request and the real request will have different crossorigin values
# and the preloaded file won't be loaded
crossorigin = 'anonymous' if Rails.configuration.action_controller.asset_host.present?
preload_link_tag(src, options.merge(integrity: integrity, crossorigin: crossorigin))
end
end
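# Illustrative effect (hedged — the pack name and host are invented): with
# config.action_controller.asset_host set to 'https://assets.example.com',
# preload_pack_asset('common.js') emits a <link rel="preload"> tag that carries
# both the manifest integrity hash and crossorigin="anonymous", matching the
# CORS mode of the eventual script request so the preloaded response can be
# reused.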