# frozen_string_literal: true

require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'

module Mastodon
  class MediaCLI < Thor
    include ActionView::Helpers::NumberHelper
    include CLIHelper

    def self.exit_on_failure?
      true
    end

    option :days, type: :numeric, default: 7, aliases: [:d]
    option :concurrency, type: :numeric, default: 5, aliases: [:c]
    option :verbose, type: :boolean, default: false, aliases: [:v]
    option :dry_run, type: :boolean, default: false
    desc 'remove', 'Remove remote media files'
    long_desc <<-DESC
      Removes locally cached copies of media attachments from other servers.

      The --days option specifies how old media attachments have to be before
      they are removed. It defaults to 7 days.
    DESC
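    # Hypothetical example invocation (assumes this class is mounted as the `media`
    # subcommand of tootctl, as in a standard Mastodon setup):
    #
    #   RAILS_ENV=production bin/tootctl media remove --days=14 --concurrency=10 --dry-run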
    def remove
      time_ago = options[:days].days.ago
      dry_run  = options[:dry_run] ? ' (DRY RUN)' : ''
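      # parallelize_with_progress comes from CLIHelper: it yields each record of the scope on a
      # worker pool sized by --concurrency and, as used here, returns the number of records
      # processed together with the sum of the block's numeric return values (bytes freed).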
      processed, aggregate = parallelize_with_progress(MediaAttachment.cached.where.not(remote_url: '').where('created_at < ?', time_ago)) do |media_attachment|
        next if media_attachment.file.blank?

        size = media_attachment.file_file_size

        unless options[:dry_run]
          media_attachment.file.destroy
          media_attachment.save
        end

        size
      end

      say("Removed #{processed} media attachments (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
    end

    option :start_after
    option :dry_run, type: :boolean, default: false
    desc 'remove-orphans', 'Scan storage and check for files that do not belong to existing media attachments'
    long_desc <<~LONG_DESC
      Scans file storage for files that do not belong to existing media attachments. Because this operation
      requires iterating over every single file individually, it will be slow.

      Please mind that some storage providers charge for the necessary API requests to list objects.
    LONG_DESC
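    # Hypothetical example invocations (same tootctl assumption as above); --start-after lets an
    # S3 scan resume from the key reported by an earlier, interrupted run:
    #
    #   RAILS_ENV=production bin/tootctl media remove-orphans --dry-run
    #   RAILS_ENV=production bin/tootctl media remove-orphans --start-after=<key from a previous run>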
    def remove_orphans
      progress        = create_progress_bar(nil)
      reclaimed_bytes = 0
      removed         = 0
      dry_run         = options[:dry_run] ? ' (DRY RUN)' : ''

      case Paperclip::Attachment.default_options[:storage]
      when :s3
        paperclip_instance = MediaAttachment.new.file
        s3_interface       = paperclip_instance.s3_interface
        bucket             = s3_interface.bucket(Paperclip::Attachment.default_options[:s3_credentials][:bucket])
        last_key           = options[:start_after]

        loop do
          objects = begin
            bucket.objects(start_after: last_key, prefix: 'media_attachments/files/').limit(1000).map { |x| x }
          rescue => e
            progress.log(pastel.red("Error fetching list of files: #{e}"))
            progress.log("If you want to continue from this point, add --start-after=#{last_key} to your command") if last_key
            break
          end

          break if objects.empty?

          last_key        = objects.last.key
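          # Object keys follow Paperclip's media_attachments/files/:id_partition/:style/:filename
          # layout, so joining everything between the prefix and the filename and calling to_i
          # (which stops at the non-numeric style segment) yields the owning MediaAttachment ID.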
          attachments_map = MediaAttachment.where(id: objects.map { |object| object.key.split('/')[2..-2].join.to_i }).each_with_object({}) { |attachment, map| map[attachment.id] = attachment }

          objects.each do |object|
            attachment_id = object.key.split('/')[2..-2].join.to_i
            filename      = object.key.split('/').last

            progress.increment

            next unless attachments_map[attachment_id].nil? || !attachments_map[attachment_id].variant?(filename)

            begin
              object.delete unless options[:dry_run]

              reclaimed_bytes += object.size
              removed         += 1

              progress.log("Found and removed orphan: #{object.key}")
            rescue => e
              progress.log(pastel.red("Error processing #{object.key}: #{e}"))
            end
          end
        end
      when :fog
        say('The fog storage driver is not supported for this operation at this time', :red)
        exit(1)
      when :filesystem
        require 'find'

        root_path = ENV.fetch('PAPERCLIP_ROOT_PATH', File.join(':rails_root', 'public', 'system')).gsub(':rails_root', Rails.root.to_s)

        Find.find(File.join(root_path, 'media_attachments', 'files')) do |path|
          next if File.directory?(path)

          key           = path.gsub("#{root_path}#{File::SEPARATOR}", '')
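          # The path relative to root_path mirrors the S3 key layout above, so the attachment ID
          # and variant filename are recovered the same way.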
          attachment_id = key.split(File::SEPARATOR)[2..-2].join.to_i
          filename      = key.split(File::SEPARATOR).last
          attachment    = MediaAttachment.find_by(id: attachment_id)

          progress.increment

          next unless attachment.nil? || !attachment.variant?(filename)

          begin
            size = File.size(path)

            File.delete(path) unless options[:dry_run]

            reclaimed_bytes += size
            removed         += 1

            progress.log("Found and removed orphan: #{key}")
          rescue => e
            progress.log(pastel.red("Error processing #{key}: #{e}"))
          end
        end
      end

      progress.total = progress.progress
      progress.finish

      say("Removed #{removed} orphans (approx. #{number_to_human_size(reclaimed_bytes)})#{dry_run}", :green, true)
    end

    option :account, type: :string
    option :domain, type: :string
    option :status, type: :numeric
    option :concurrency, type: :numeric, default: 5, aliases: [:c]
    option :verbose, type: :boolean, default: false, aliases: [:v]
    option :dry_run, type: :boolean, default: false
    option :force, type: :boolean, default: false
    desc 'refresh', 'Fetch remote media files'
    long_desc <<-DESC
      Re-downloads media attachments from other servers. You must specify the
      source of media attachments with one of the following options:

      Use the --status option to download attachments from a specific status,
      using the local numeric ID of the status.

      Use the --account option to download attachments from a specific account,
      using the username@domain handle of the account.

      Use the --domain option to download attachments from a specific domain.

      By default, attachments that are believed to be already downloaded will
      not be re-downloaded. To force re-download of every URL, use --force.
    DESC
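    # Hypothetical example invocations (same tootctl assumption as above):
    #
    #   RAILS_ENV=production bin/tootctl media refresh --account=alice@example.com
    #   RAILS_ENV=production bin/tootctl media refresh --domain=example.com --force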
    def refresh
      dry_run = options[:dry_run] ? ' (DRY RUN)' : ''

      if options[:status]
        scope = MediaAttachment.where(status_id: options[:status])
      elsif options[:account]
        username, domain = options[:account].split('@')
        account = Account.find_remote(username, domain)

        if account.nil?
          say('No such account', :red)
          exit(1)
        end

        scope = MediaAttachment.where(account_id: account.id)
      elsif options[:domain]
        scope = MediaAttachment.joins(:account).merge(Account.by_domain_and_subdomains(options[:domain]))
      else
        exit(1)
      end

      processed, aggregate = parallelize_with_progress(scope) do |media_attachment|
        next if media_attachment.remote_url.blank? || (!options[:force] && media_attachment.file_file_name.present?)

        unless options[:dry_run]
          media_attachment.reset_file!
          media_attachment.save
        end

        media_attachment.file_file_size
      end

      say("Downloaded #{processed} media attachments (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
    end

    desc 'usage', 'Calculate disk space consumed by Mastodon'
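    # Reports one figure per storage-consuming model, with local-only subtotals in parentheses
    # where the data distinguishes local from remote records. Hypothetical example invocation
    # (same tootctl assumption as above): RAILS_ENV=production bin/tootctl media usage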
    def usage
      say("Attachments:\t#{number_to_human_size(MediaAttachment.sum(:file_file_size))} (#{number_to_human_size(MediaAttachment.where(account: Account.local).sum(:file_file_size))} local)")
      say("Custom emoji:\t#{number_to_human_size(CustomEmoji.sum(:image_file_size))} (#{number_to_human_size(CustomEmoji.local.sum(:image_file_size))} local)")
      say("Preview cards:\t#{number_to_human_size(PreviewCard.sum(:image_file_size))}")
      say("Avatars:\t#{number_to_human_size(Account.sum(:avatar_file_size))} (#{number_to_human_size(Account.local.sum(:avatar_file_size))} local)")
      say("Headers:\t#{number_to_human_size(Account.sum(:header_file_size))} (#{number_to_human_size(Account.local.sum(:header_file_size))} local)")
      say("Backups:\t#{number_to_human_size(Backup.sum(:dump_file_size))}")
      say("Imports:\t#{number_to_human_size(Import.sum(:data_file_size))}")
      say("Settings:\t#{number_to_human_size(SiteUpload.sum(:file_file_size))}")
    end

    desc 'lookup', 'Look up where media is displayed by passing a media URL'
    def lookup
      prompt = TTY::Prompt.new

      url = prompt.ask('Please enter a URL to the media to lookup:', required: true)

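      # Media and account URLs carry the record ID split across consecutive all-digit path
      # segments, so collecting those segments and concatenating them reconstructs the ID to
      # look up.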
      attachment_id = url
                      .split('/')[0..-2]
                      .grep(/\A\d+\z/)
                      .join('')

      if url.split('/')[0..-2].include? 'media_attachments'
        model = MediaAttachment.find(attachment_id).status
        prompt.say(ActivityPub::TagManager.instance.url_for(model))
      elsif url.split('/')[0..-2].include? 'accounts'
        model = Account.find(attachment_id)
        prompt.say(ActivityPub::TagManager.instance.url_for(model))
      else
        prompt.say('Not found')
      end
    end
  end
end