mirror of
https://gitlab.com/gitlab-org/gitlab-foss.git
synced 2025-08-20 14:11:11 +00:00
Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
@ -57,10 +57,6 @@ module Backup
|
||||
}.merge(Gitlab::GitalyClient.connection_data(repository.storage)).to_json)
|
||||
end
|
||||
|
||||
# This strategy manages its own concurrency, so the caller must not
# enqueue repositories from multiple threads. Backup::Repositories
# falls back to consecutive enqueueing when this returns false.
def parallel_enqueue?
  false
end
|
||||
|
||||
private
|
||||
|
||||
def started?
|
||||
|
@ -1,132 +0,0 @@
|
||||
# frozen_string_literal: true

module Backup
  # Backs up and restores repositories over the Gitaly RPC interface.
  #
  # The object is driven as a strategy by Backup::Repositories:
  # +start+ is called once with :create or :restore, +enqueue+ once per
  # repository, and +wait+ after all repositories have been handed over.
  class GitalyRpcBackup
    def initialize(progress)
      @progress = progress
    end

    # Begin a session of the given +type+ (:create or :restore).
    # Raises Error if a session is already running or +type+ is unknown.
    def start(type)
      raise Error, 'already started' if @type

      @type = type

      if type == :create
        prepare_backup_directories
      elsif type != :restore
        raise Error, "unknown backup type: #{type}"
      end
    end

    # Finish the current session. Every repository is processed
    # synchronously inside #enqueue, so there is nothing left to wait
    # for beyond clearing the session type.
    def wait
      @type = nil
    end

    # Back up or restore a single repository, depending on the session
    # type established by #start. Raises Error when #start was not
    # called first.
    def enqueue(container, repository_type)
      job = BackupRestore.new(
        progress,
        repository_type.repository_for(container),
        backup_repos_path
      )

      if @type == :create
        job.backup
      elsif @type == :restore
        job.restore(always_create: repository_type.project?)
      else
        raise Error, 'not started'
      end
    end

    # Repositories may safely be enqueued from multiple threads.
    def parallel_enqueue?
      true
    end

    private

    attr_reader :progress

    # Recreate the repositories backup directory from scratch for a
    # :create session. Mode 0o700 keeps the bundles private.
    def prepare_backup_directories
      FileUtils.rm_rf(backup_repos_path)
      FileUtils.mkdir_p(Gitlab.config.backup.path)
      FileUtils.mkdir(backup_repos_path, mode: 0o700)
    end

    def backup_repos_path
      @backup_repos_path ||= File.join(Gitlab.config.backup.path, 'repositories')
    end

    # Performs the per-repository work: writing or reading a git bundle
    # plus the custom-hooks tarball. Failures are reported on +progress+
    # instead of raised, so one broken repository does not abort the
    # whole run.
    class BackupRestore
      attr_accessor :progress, :repository, :backup_repos_path

      def initialize(progress, repository, backup_repos_path)
        @progress = progress
        @repository = repository
        @backup_repos_path = backup_repos_path
      end

      def backup
        progress.puts(" * #{display_repo_path} ... ")

        # Nothing to bundle for an empty repository.
        if repository.empty?
          progress.puts(" * #{display_repo_path} ... " + "[EMPTY] [SKIPPED]".color(:cyan))
          return
        end

        FileUtils.mkdir_p(repository_backup_path)

        repository.bundle_to_disk(path_to_bundle)
        repository.gitaly_repository_client.backup_custom_hooks(custom_hooks_tar)

        progress.puts(" * #{display_repo_path} ... " + "[DONE]".color(:green))
      rescue StandardError => e
        progress.puts("[Failed] backing up #{display_repo_path}".color(:red))
        progress.puts("Error #{e}".color(:red))
      end

      def restore(always_create: false)
        progress.puts(" * #{display_repo_path} ... ")

        # Best-effort removal of any existing repository before
        # recreating it from the bundle.
        begin
          repository.remove
        rescue StandardError
          nil
        end

        if File.exist?(path_to_bundle)
          repository.create_from_bundle(path_to_bundle)
          restore_custom_hooks
        elsif always_create
          # Some repositories must exist even without a bundle.
          repository.create_repository
        end

        progress.puts(" * #{display_repo_path} ... " + "[DONE]".color(:green))
      rescue StandardError => e
        progress.puts("[Failed] restoring #{display_repo_path}".color(:red))
        progress.puts("Error #{e}".color(:red))
      end

      private

      def display_repo_path
        "#{repository.full_path} (#{repository.disk_path})"
      end

      def repository_backup_path
        @repository_backup_path ||= File.join(backup_repos_path, repository.disk_path)
      end

      def path_to_bundle
        @path_to_bundle ||= File.join(backup_repos_path, repository.disk_path + '.bundle')
      end

      def restore_custom_hooks
        return unless File.exist?(custom_hooks_tar)

        repository.gitaly_repository_client.restore_custom_hooks(custom_hooks_tar)
      end

      def custom_hooks_tar
        File.join(repository_backup_path, "custom_hooks.tar")
      end
    end
  end
end
|
@ -9,36 +9,10 @@ module Backup
|
||||
@strategy = strategy
|
||||
end
|
||||
|
||||
def dump(max_concurrency:, max_storage_concurrency:)
|
||||
def dump
|
||||
strategy.start(:create)
|
||||
enqueue_consecutive
|
||||
|
||||
# gitaly-backup is designed to handle concurrency on its own. So we want
|
||||
# to avoid entering the buggy concurrency code here when gitaly-backup
|
||||
# is enabled.
|
||||
if (max_concurrency <= 1 && max_storage_concurrency <= 1) || !strategy.parallel_enqueue?
|
||||
return enqueue_consecutive
|
||||
end
|
||||
|
||||
check_valid_storages!
|
||||
|
||||
semaphore = Concurrent::Semaphore.new(max_concurrency)
|
||||
errors = Queue.new
|
||||
|
||||
threads = Gitlab.config.repositories.storages.keys.map do |storage|
|
||||
Thread.new do
|
||||
Rails.application.executor.wrap do
|
||||
enqueue_storage(storage, semaphore, max_storage_concurrency: max_storage_concurrency)
|
||||
rescue StandardError => e
|
||||
errors << e
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
|
||||
threads.each(&:join)
|
||||
end
|
||||
|
||||
raise errors.pop unless errors.empty?
|
||||
ensure
|
||||
strategy.wait
|
||||
end
|
||||
@ -58,18 +32,6 @@ module Backup
|
||||
|
||||
attr_reader :progress, :strategy
|
||||
|
||||
# Verify that repositories.storages in gitlab.yml covers every storage
# actually referenced by repository records; raise Error otherwise so a
# concurrent backup does not silently skip repositories.
def check_valid_storages!
  repository_storage_klasses.each do |klass|
    next unless klass.excluding_repository_storage(Gitlab.config.repositories.storages.keys).exists?

    raise Error, "repositories.storages in gitlab.yml does not include all storages used by #{klass}"
  end
end
|
||||
|
||||
# Record classes that track which storage each repository lives on;
# used by check_valid_storages! to validate the configured storages.
def repository_storage_klasses
  [ProjectRepository, SnippetRepository]
end
|
||||
|
||||
def enqueue_consecutive
|
||||
enqueue_consecutive_projects
|
||||
enqueue_consecutive_snippets
|
||||
@ -85,50 +47,6 @@ module Backup
|
||||
Snippet.find_each(batch_size: 1000) { |snippet| enqueue_snippet(snippet) }
|
||||
end
|
||||
|
||||
# Back up every repository on +storage+ using up to
# +max_storage_concurrency+ worker threads fed through a bounded queue.
# +semaphore+ additionally caps total concurrency across all storages.
# The first worker error is re-raised after the producer finishes.
def enqueue_storage(storage, semaphore, max_storage_concurrency:)
  errors = Queue.new
  # Size 1 keeps the producer in lock-step with the consumers, so
  # records are not buffered far ahead of the workers.
  queue = InterlockSizedQueue.new(1)

  threads = Array.new(max_storage_concurrency) do
    Thread.new do
      Rails.application.executor.wrap do
        # queue.pop returns nil once the queue is closed (see ensure),
        # which terminates the loop.
        while container = queue.pop
          # Release the Rails autoload interlock while blocked on the
          # semaphore, so waiting here cannot deadlock autoloading in
          # other threads.
          ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
            semaphore.acquire
          end

          begin
            enqueue_container(container)
          rescue StandardError => e
            # Record the failure and stop this worker; the producer
            # also checks +errors+ and stops pushing.
            errors << e
            break
          ensure
            semaphore.release
          end
        end
      end
    end
  end

  enqueue_records_for_storage(storage, queue, errors)

  raise errors.pop unless errors.empty?
ensure
  # Close the queue so idle workers exit their loops, then join them
  # while permitting concurrent loads to avoid interlock deadlocks.
  queue.close
  ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
    threads.each(&:join)
  end
end
|
||||
|
||||
# Dispatch a queued record to the type-specific enqueue helper.
# Records of any other class are ignored, matching the original
# case/when fall-through behavior.
def enqueue_container(container)
  if container.is_a?(Project)
    enqueue_project(container)
  elsif container.is_a?(Snippet)
    enqueue_snippet(container)
  end
end
|
||||
|
||||
def enqueue_project(project)
|
||||
strategy.enqueue(project, Gitlab::GlRepository::PROJECT)
|
||||
strategy.enqueue(project, Gitlab::GlRepository::WIKI)
|
||||
@ -139,32 +57,10 @@ module Backup
|
||||
strategy.enqueue(snippet, Gitlab::GlRepository::SNIPPET)
|
||||
end
|
||||
|
||||
# Push every repository-bearing record on +storage+ onto +queue+ in
# batches, stopping early as soon as a worker has reported into
# +errors+. Blocks when the bounded queue is full.
def enqueue_records_for_storage(storage, queue, errors)
  records_to_enqueue(storage).each do |scope|
    scope.find_each(batch_size: 100) do |record|
      break unless errors.empty?

      queue.push(record)
    end
  end
end
|
||||
|
||||
# Relations for all repository-bearing records on +storage+, in the
# order they are backed up: projects first, then snippets.
def records_to_enqueue(storage)
  [projects_in_storage(storage), snippets_in_storage(storage)]
end
|
||||
|
||||
# Projects whose repository records live on +storage+, using the
# eager-loaded scope from #project_relation.
def projects_in_storage(storage)
  project_relation.id_in(ProjectRepository.for_repository_storage(storage).select(:project_id))
end
|
||||
|
||||
# Base Project scope with route, group and namespace owner eagerly
# loaded for the records processed during backup.
def project_relation
  Project.includes(:route, :group, namespace: :owner)
end
|
||||
|
||||
# Snippets whose repository records live on +storage+.
def snippets_in_storage(storage)
  Snippet.id_in(SnippetRepository.for_repository_storage(storage).select(:snippet_id))
end
|
||||
|
||||
def restore_object_pools
|
||||
PoolRepository.includes(:source_project).find_each do |pool|
|
||||
progress.puts " - Object pool #{pool.disk_path}..."
|
||||
@ -199,24 +95,6 @@ module Backup
|
||||
|
||||
Snippet.id_in(invalid_snippets).delete_all
|
||||
end
|
||||
|
||||
# SizedQueue whose blocking pop/push release the Rails autoload
# interlock while waiting, so a backup thread blocked on the queue
# cannot deadlock other threads that need to autoload constants.
class InterlockSizedQueue < SizedQueue
  # NOTE(review): `override` is a project macro (Gitlab::Utils::Override),
  # presumably verifying the parent defines the method — confirm.
  extend ::Gitlab::Utils::Override

  override :pop
  def pop(*)
    ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
      super
    end
  end

  override :push
  def push(*)
    ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
      super
    end
  end
end
|
||||
end
|
||||
end
|
||||
|
||||
|
Reference in New Issue
Block a user