Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot
2024-10-31 03:14:13 +00:00
parent 747345dee0
commit deba779683
78 changed files with 1662 additions and 323 deletions

View File

@ -20,6 +20,7 @@ module Gitlab
autoload :GitlabConfig, 'gitlab/backup/cli/gitlab_config'
autoload :Metadata, 'gitlab/backup/cli/metadata'
autoload :Output, 'gitlab/backup/cli/output'
autoload :RepoType, 'gitlab/backup/cli/repo_type'
autoload :RestoreExecutor, 'gitlab/backup/cli/restore_executor'
autoload :Runner, 'gitlab/backup/cli/runner'
autoload :Shell, 'gitlab/backup/cli/shell'

View File

@ -51,20 +51,17 @@ module Gitlab
end
def execute_all_tasks
# TODO: when we migrate targets to the new codebase, recreate options to have only what we need here
# https://gitlab.com/gitlab-org/gitlab/-/issues/454906
options = ::Backup::Options.new(
remote_directory: backup_bucket,
container_registry_bucket: registry_bucket,
service_account_file: service_account_file
)
tasks = []
Gitlab::Backup::Cli::Tasks.build_each(context: context, options: options) do |task|
Gitlab::Backup::Cli::Tasks.build_each(context: context) do |task|
# This is a temporary hack while we move away from options and use config instead
# This hack will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/498455
task.set_registry_bucket(registry_bucket) if task.is_a?(Gitlab::Backup::Cli::Tasks::Registry)
Gitlab::Backup::Cli::Output.info("Executing Backup of #{task.human_name}...")
duration = measure_duration do
task.backup!(workdir, metadata.backup_id)
task.backup!(workdir)
tasks << task
end

View File

@ -104,7 +104,7 @@ module Gitlab
end
def config(object_type)
Gitlab.config[object_type]
gitlab_config[object_type]
end
def env
@ -112,6 +112,18 @@ module Gitlab
ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence || "development")
end
def config_repositories_storages
gitlab_config.dig(env, 'repositories', 'storages')
end
def gitaly_backup_path
gitlab_config.dig(env, 'backup', 'gitaly_backup_path')
end
def gitaly_token
gitlab_config.dig(env, 'gitaly', 'token')
end
private
# Return the shared path used as a fallback base location to each blob type

View File

@ -6,6 +6,8 @@ module Gitlab
module Errors
autoload :DatabaseBackupError, 'gitlab/backup/cli/errors/database_backup_error'
autoload :FileBackupError, 'gitlab/backup/cli/errors/file_backup_error'
autoload :FileRestoreError, 'gitlab/backup/cli/errors/file_restore_error'
autoload :GitalyBackupError, 'gitlab/backup/cli/errors/gitaly_backup_error'
end
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      module Errors
        # Raised when restoring a file-based backup task fails
        # (e.g. the decompression/tar pipeline exits unsuccessfully).
        class FileRestoreError < StandardError
          attr_reader :error_message

          # @param [String] error_message detail appended to the generic failure text
          def initialize(error_message:)
            @error_message = error_message
            # Pass the fully formatted message to StandardError so #to_s,
            # #inspect and #message all agree. A bare `super` would forward
            # the keyword argument, leaving the exception's internal message
            # as an inspected Hash.
            super(message)
          end

          # @return [String] human-readable failure description
          def message
            "Restore operation failed: #{error_message}"
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      module Errors
        # Raised when the gitaly-backup subprocess cannot be started,
        # misbehaves, or exits with a non-zero status.
        class GitalyBackupError < StandardError
          attr_reader :error_message

          # @param [String] error_message detail appended to the generic failure text
          def initialize(error_message = '')
            @error_message = error_message
            # Pass the formatted message to StandardError so #to_s matches
            # the overridden #message (a bare `super` would set the internal
            # message to the raw error_message, dropping the prefix).
            super(message)
          end

          # @return [String] human-readable failure description
          def message
            "Repository Backup/Restore failed. #{error_message}"
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      # Enumerates the repository flavors handled by the backup tooling.
      # The symbols are passed around (e.g. to GitalyBackup#enqueue) to
      # select how repository info is derived from a container.
      class RepoType
        PROJECT = :project
        WIKI = :wiki
        SNIPPET = :snippet
        DESIGN = :design
      end
    end
  end
end

View File

@ -41,10 +41,6 @@ module Gitlab
execute_all_tasks
end
def backup_options
@backup_options ||= build_backup_options!
end
def metadata
@metadata ||= read_metadata!
end
@ -57,14 +53,16 @@ module Gitlab
private
def execute_all_tasks
# TODO: when we migrate targets to the new codebase, recreate options to have only what we need here
# https://gitlab.com/gitlab-org/gitlab/-/issues/454906
tasks = []
Gitlab::Backup::Cli::Tasks.build_each(context: context, options: backup_options) do |task|
Gitlab::Backup::Cli::Tasks.build_each(context: context) do |task|
# This is a temporary hack while we move away from options and use config instead
# This hack will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/498455
task.set_registry_bucket(registry_bucket) if task.is_a?(Gitlab::Backup::Cli::Tasks::Registry)
Gitlab::Backup::Cli::Output.info("Executing restoration of #{task.human_name}...")
duration = measure_duration do
tasks << { name: task.human_name, result: task.restore!(archive_directory, backup_id) }
tasks << { name: task.human_name, result: task.restore!(archive_directory) }
end
next if task.object_storage?
@ -87,15 +85,6 @@ module Gitlab
@metadata = Gitlab::Backup::Cli::Metadata::BackupMetadata.load!(archive_directory)
end
def build_backup_options!
::Backup::Options.new(
backup_id: backup_id,
remote_directory: backup_bucket,
container_registry_bucket: registry_bucket,
service_account_file: service_account_file
)
end
# @return [Pathname] temporary directory
def create_temporary_workdir!
# Ensure base directory exists

View File

@ -6,7 +6,11 @@ module Gitlab
module Targets
autoload :Target, 'gitlab/backup/cli/targets/target'
autoload :Database, 'gitlab/backup/cli/targets/database'
autoload :Files, 'gitlab/backup/cli/targets/files'
autoload :ObjectStorage, 'gitlab/backup/cli/targets/object_storage'
autoload :GitalyBackup, 'gitlab/backup/cli/targets/gitaly_backup'
autoload :GitalyClient, 'gitlab/backup/cli/targets/gitaly_client'
autoload :Repositories, 'gitlab/backup/cli/targets/repositories'
end
end
end

View File

@ -17,14 +17,16 @@ module Gitlab
].freeze
IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze
def initialize(options:)
super(options: options)
def initialize
@errors = []
@force = options.force?
# This flag will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/494209
# This option will be reintroduced as part of
# https://gitlab.com/gitlab-org/gitlab/-/issues/498453
@force = false
end
def dump(destination_dir, _)
def dump(destination_dir)
FileUtils.mkdir_p(destination_dir)
each_database(destination_dir) do |backup_connection|
@ -74,7 +76,7 @@ module Gitlab
end
end
def restore(destination_dir, _)
def restore(destination_dir)
@errors = []
base_models_for_backup.each do |database_name, _|

View File

@ -0,0 +1,103 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      module Targets
        # Backs up and restores a directory tree by streaming `tar`
        # through a compression/decompression shell pipeline.
        class Files < Target
          DEFAULT_EXCLUDE = ['lost+found'].freeze

          attr_reader :excludes

          # @param [String] storage_path
          # @param [Array] excludes
          def initialize(context, storage_path, excludes: [])
            super(context)

            @storage_path = storage_path
            @excludes = excludes
          end

          # Pack the storage directory into a compressed archive at +destination+.
          def dump(destination)
            pack = Utils::Tar.new.pack_from_stdin_cmd(
              target_directory: storage_realpath,
              target: '.',
              excludes: excludes)

            result = Shell::Pipeline
              .new(pack, Utils::Compression.compression_command)
              .run!(output: [destination, 'w', 0o600])

            raise Errors::FileBackupError.new(storage_realpath, destination) unless success?(result)
          end

          # Extract a compressed archive at +source+ into the storage directory.
          def restore(source)
            # Existing files will be handled in https://gitlab.com/gitlab-org/gitlab/-/issues/499876
            if File.exist?(storage_realpath)
              Output.warning "Ignoring existing files at #{storage_realpath} and continuing restore."
            end

            extract = Utils::Tar.new.extract_from_stdin_cmd(target_directory: storage_realpath)

            result = Shell::Pipeline
              .new(Utils::Compression.decompression_command, extract)
              .run!(input: source.to_s)

            raise Errors::FileRestoreError.new(error_message: result.stderr) unless success?(result)
          end

          private

          # A run counts as successful when the pipeline exited cleanly, or
          # when tar's failure is one of the deliberately tolerated cases.
          def success?(result)
            result.success? ||
              ignore_non_success?(result.status_list[1].exitstatus, result.stderr)
          end

          def noncritical_warning_matcher
            /^g?tar: \.: Cannot mkdir: No such file or directory$/
          end

          def ignore_non_success?(exitstatus, output)
            # tar can exit with nonzero code:
            # 1 - if some files changed (i.e. a CI job is currently writes to log)
            # 2 - if it cannot create `.` directory (see issue https://gitlab.com/gitlab-org/gitlab/-/issues/22442)
            # http://www.gnu.org/software/tar/manual/html_section/tar_19.html#Synopsis
            # so check tar status 1 or stderr output against some non-critical warnings
            if exitstatus == 1
              Output.print_info "Ignoring tar exit status 1 'Some files differ': #{output}"
              return true
            end

            # allow tar to fail with other non-success status if output contain non-critical warning
            if noncritical_warning_matcher&.match?(output)
              Output.print_info(
                "Ignoring non-success exit status #{exitstatus} due to output of non-critical warning(s): #{output}")
              return true
            end

            false
          end

          def storage_realpath
            @storage_realpath ||= File.realpath(@storage_path)
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,187 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      module Targets
        class GitalyBackup
          # Backup and restores repositories using gitaly-backup
          #
          # gitaly-backup can work in parallel and accepts a list of repositories
          # through input pipe using a specific json format for both backup and restore

          attr_reader :context

          def initialize(context)
            @context = context
          end

          # Spawn the gitaly-backup process, keeping its stdin open so
          # repositories can be enqueued via #enqueue.
          #
          # @param [Symbol] type +:create+ or +:restore+
          # @param [String, Pathname] backup_repos_path repositories backup location
          # @param [String, nil] backup_id unique identifier for the backup
          # @param [Array, nil] remove_all_repositories storages to wipe before a restore
          def start(type, backup_repos_path, backup_id: nil, remove_all_repositories: nil)
            raise Gitlab::Backup::Cli::Errors::GitalyBackupError, 'already started' if started?

            FileUtils.rm_rf(backup_repos_path) if type == :create

            @input_stream, stdout, @thread = Open3.popen2(
              build_env,
              bin_path,
              *gitaly_backup_args(type, backup_repos_path.to_s, backup_id, remove_all_repositories)
            )

            # Forward subprocess output to our stdout without blocking enqueues.
            @out_reader = Thread.new do
              IO.copy_stream(stdout, $stdout)
            end
          end

          # Close the job pipe and wait for gitaly-backup to terminate.
          #
          # @raise [Gitlab::Backup::Cli::Errors::GitalyBackupError] on non-zero exit
          def finish!
            return unless started?

            @input_stream.close
            @thread.join
            status = @thread.value
            @thread = nil

            # Join the forwarder thread so no subprocess output is lost
            # (it finishes once the child closes its stdout).
            @out_reader&.join
            @out_reader = nil

            return unless status.exitstatus != 0

            raise Gitlab::Backup::Cli::Errors::GitalyBackupError,
              "gitaly-backup exit status #{status.exitstatus}"
          end

          # Queue a repository for processing by the running gitaly-backup process.
          #
          # @param [Object] container project/wiki/snippet/design container
          # @param [Symbol] repo_type one of the RepoType constants
          def enqueue(container, repo_type)
            raise Gitlab::Backup::Cli::Errors::GitalyBackupError, 'not started' unless started?
            raise Gitlab::Backup::Cli::Errors::GitalyBackupError, 'no container for repo type' unless container

            storage, relative_path, gl_project_path, always_create = repository_info_for(container, repo_type)

            schedule_backup_job(storage, relative_path, gl_project_path, always_create)
          end

          private

          # Maps a container to [storage, relative_path, gl_project_path, always_create]
          # depending on the repository flavor. Returns nil for unknown types.
          def repository_info_for(container, repo_type)
            case repo_type
            when RepoType::PROJECT
              [container.repository_storage,
                container.disk_path || container.full_path,
                container.full_path,
                true]
            when RepoType::WIKI
              wiki_repo_info(container)
            when RepoType::SNIPPET
              [container.repository_storage,
                container.disk_path || container.full_path,
                container.full_path,
                false]
            when RepoType::DESIGN
              [design_repo_storage(container),
                container.project.disk_path,
                container.project.full_path,
                false]
            end
          end

          def design_repo_storage(container)
            return container.repository.repository_storage if container.repository.respond_to?(:repository_storage)

            container.repository_storage
          end

          def wiki_repo_info(container)
            wiki = container.respond_to?(:wiki) ? container.wiki : container
            [wiki.repository_storage,
              wiki.disk_path || wiki.full_path,
              wiki.full_path,
              false]
          end

          # Builds the gitaly-backup CLI argument list for the given operation.
          def gitaly_backup_args(type, backup_repos_path, backup_id, remove_all_repositories)
            command = case type
                      when :create
                        'create'
                      when :restore
                        'restore'
                      else
                        raise Gitlab::Backup::Cli::Errors::GitalyBackupError, "unknown backup type: #{type}"
                      end

            args = [command] + ['-path', backup_repos_path, '-layout', 'manifest']

            case type
            when :create
              args += ['-id', backup_id] if backup_id
            when :restore
              args += ['-remove-all-repositories', remove_all_repositories.join(',')] if remove_all_repositories
              args += ['-id', backup_id] if backup_id
            end

            args
          end

          # Schedule a new backup job through a non-blocking JSON based pipe protocol
          #
          # @see https://gitlab.com/gitlab-org/gitaly/-/blob/master/doc/gitaly-backup.md
          def schedule_backup_job(storage, relative_path, gl_project_path, always_create)
            json_job = {
              storage_name: storage,
              relative_path: relative_path,
              gl_project_path: gl_project_path,
              always_create: always_create
            }.to_json

            @input_stream.puts(json_job)
          end

          def gitaly_servers
            storages = context.config_repositories_storages

            # NOTE: an empty Hash is truthy, so the previous `unless storages.keys`
            # check could never trigger; explicitly detect a missing/empty config.
            if storages.nil? || storages.empty?
              raise Gitlab::Backup::Cli::Errors::GitalyBackupError,
                "No repositories' storages found."
            end

            # Build the client once; it only depends on the storages map and token.
            client = GitalyClient.new(storages, context.gitaly_token)

            storages.keys.index_with do |storage_name|
              client.connection_data(storage_name)
            end
          end

          def gitaly_servers_encoded
            Base64.strict_encode64(JSON.dump(gitaly_servers))
          end

          # These variables will be moved to a config file via
          # https://gitlab.com/gitlab-org/gitlab/-/issues/500437
          def default_cert_dir
            ENV.fetch('SSL_CERT_DIR', OpenSSL::X509::DEFAULT_CERT_DIR)
          end

          def default_cert_file
            ENV.fetch('SSL_CERT_FILE', OpenSSL::X509::DEFAULT_CERT_FILE)
          end

          def build_env
            # NOTE(review): merging current_env last means a pre-existing
            # GITALY_SERVERS in ENV would override the computed value — confirm
            # this precedence is intended.
            {
              'SSL_CERT_FILE' => default_cert_file,
              'SSL_CERT_DIR' => default_cert_dir,
              'GITALY_SERVERS' => gitaly_servers_encoded
            }.merge(current_env)
          end

          def current_env
            ENV
          end

          def started?
            @thread.present?
          end

          def bin_path
            unless context.gitaly_backup_path.present?
              raise Gitlab::Backup::Cli::Errors::GitalyBackupError,
                'gitaly-backup binary not found and gitaly_backup_path is not configured'
            end

            File.absolute_path(context.gitaly_backup_path)
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      module Targets
        # Resolves per-storage Gitaly connection details (address + token)
        # from the configured repository storages.
        class GitalyClient
          attr_reader :storages, :gitaly_token

          # @param [Hash] storages storage name => storage settings
          # @param [String, nil] gitaly_token fallback token when a storage defines none
          def initialize(storages, gitaly_token)
            @storages = storages
            @gitaly_token = gitaly_token
          end

          # @param [String] storage storage name to look up
          # @return [Hash] 'address' and 'token' entries for the storage
          def connection_data(storage)
            storage_config = storages[storage]
            raise "storage not found: #{storage.inspect}" if storage_config.nil?

            { 'address' => address(storage), 'token' => token(storage) }
          end

          private

          # Validates and returns the storage's configured gitaly_address.
          def address(storage)
            gitaly_address = storages[storage]['gitaly_address']
            raise "storage #{storage.inspect} is missing a gitaly_address" unless gitaly_address.present?

            scheme = URI(gitaly_address).scheme
            unless %w[tcp unix tls dns].include?(scheme)
              raise "Unsupported Gitaly address: " \
                "#{gitaly_address.inspect} does not use URL scheme 'tcp' or 'unix' or 'tls' or 'dns'"
            end

            gitaly_address
          end

          # A storage-specific token takes precedence over the global one.
          def token(storage)
            storages[storage]['gitaly_token'].presence || gitaly_token
          end
        end
      end
    end
  end
end

View File

@ -12,14 +12,15 @@ module Gitlab
attr_accessor :object_type, :backup_bucket, :client, :config, :results
def initialize(object_type, options, config)
def initialize(object_type, remote_directory, config)
@object_type = object_type
@backup_bucket = options.remote_directory
@backup_bucket = remote_directory
@config = config
@client = ::Google::Cloud::StorageTransfer.storage_transfer_service
end
def dump(_, backup_id)
# @param [String] backup_id unique identifier for the backup
def dump(backup_id)
response = find_or_create_job(backup_id, "backup")
run_request = {
project_id: backup_job_spec(backup_id)[:project_id],
@ -28,7 +29,8 @@ module Gitlab
@results = client.run_transfer_job run_request
end
def restore(_, backup_id)
# @param [String] backup_id unique identifier for the backup
def restore(backup_id)
response = find_or_create_job(backup_id, "restore")
run_request = {
project_id: restore_job_spec(backup_id)[:project_id],

View File

@ -0,0 +1,96 @@
# frozen_string_literal: true

require 'yaml'

module Gitlab
  module Backup
    module Cli
      module Targets
        # Backup and restores repositories by querying the database
        class Repositories < Target
          # Back up every project, wiki, design and snippet repository into +destination+.
          def dump(destination)
            strategy.start(:create, destination)
            enqueue_consecutive
          ensure
            strategy.finish!
          end

          # Restore all repositories from +source+, wiping existing storages first.
          def restore(source)
            strategy.start(:restore, source, remove_all_repositories: remove_all_repositories)
            enqueue_consecutive
          ensure
            strategy.finish!
            restore_object_pools
          end

          # Memoized gitaly-backup strategy shared by dump and restore.
          def strategy
            @strategy ||= GitalyBackup.new(context)
          end

          private

          def remove_all_repositories
            context.config_repositories_storages.keys
          end

          def enqueue_consecutive
            enqueue_consecutive_projects
            enqueue_consecutive_snippets
          end

          def enqueue_consecutive_projects
            project_relation.find_each(batch_size: 1000) { |project| enqueue_project(project) }
          end

          def enqueue_consecutive_snippets
            snippet_relation.find_each(batch_size: 1000) do |snippet|
              enqueue_snippet(snippet)
            end
          end

          def enqueue_project(project)
            strategy.enqueue(project, Gitlab::Backup::Cli::RepoType::PROJECT)
            strategy.enqueue(project, Gitlab::Backup::Cli::RepoType::WIKI)

            design_repo = project.design_management_repository
            strategy.enqueue(design_repo, Gitlab::Backup::Cli::RepoType::DESIGN) if design_repo
          end

          def enqueue_snippet(snippet)
            strategy.enqueue(snippet, Gitlab::Backup::Cli::RepoType::SNIPPET)
          end

          def project_relation
            Project.includes(:route, :group, :namespace)
          end

          def snippet_relation
            Snippet.all
          end

          def restore_object_pools
            PoolRepository.includes(:source_project).find_each do |pool|
              Output.info " - Object pool #{pool.disk_path}..."

              unless pool.source_project
                Output.info " - Object pool #{pool.disk_path}... [SKIPPED]"
                next
              end

              pool.state = 'none'
              pool.save

              pool.schedule
            end
          end
        end
      end
    end
  end
end

View File

@ -6,14 +6,10 @@ module Gitlab
module Targets
# Abstract class used to implement a Backup Target
class Target
# Backup creation and restore option flags
#
# TODO: Migrate to a unified backup specific Options implementation
# @return [::Backup::Options]
attr_reader :options
attr_reader :context
def initialize(options:)
@options = options
def initialize(context = nil)
@context = context
end
def asynchronous?
@ -23,13 +19,12 @@ module Gitlab
# dump task backup to `path`
#
# @param [String] path fully qualified backup task destination
# @param [String] backup_id unique identifier for the backup
def dump(path, backup_id)
def dump(path)
raise NotImplementedError
end
# restore task backup from `path`
def restore(path, backup_id)
def restore(path)
raise NotImplementedError
end
end

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.ci_job_artifacts_path

View File

@ -14,7 +14,7 @@ module Gitlab
private
def target
::Backup::Targets::Files.new(nil, storage_path, options: options)
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path)
end
def storage_path = context.ci_builds_path

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.ci_secure_files_path

View File

@ -16,7 +16,7 @@ module Gitlab
private
def target
::Gitlab::Backup::Cli::Targets::Database.new(options: options)
::Gitlab::Backup::Cli::Targets::Database.new
end
end
end

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options)
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path)
end
def storage_path = context.ci_lfs_path

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.packages_path

View File

@ -18,7 +18,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: [LEGACY_PAGES_TMP_PATH])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: [LEGACY_PAGES_TMP_PATH])
end
def storage_path = context.pages_path

View File

@ -13,16 +13,22 @@ module Gitlab
def destination_path = 'registry.tar.gz'
attr_reader :registry_bucket
def set_registry_bucket(registry_bucket)
@registry_bucket = registry_bucket
end
def object_storage?
!options.container_registry_bucket.nil?
!registry_bucket.nil?
end
# Registry does not use consolidated object storage config.
def config
settings = {
object_store: {
connection: context.config('object_store').connection.to_hash,
remote_directory: options.container_registry_bucket
connection: context.gitlab_config('object_store').connection.to_hash,
remote_directory: registry_bucket
}
}
GitlabSettings::Options.build(settings)
@ -31,7 +37,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options)
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path)
end
def storage_path = context.registry_path

View File

@ -16,24 +16,7 @@ module Gitlab
private
def target
# TODO: migrate to the new codebase and rewrite portions to format output in a readable way
::Backup::Targets::Repositories.new($stdout,
strategy: gitaly_strategy,
options: options,
storages: options.repositories_storages,
paths: options.repositories_paths,
skip_paths: options.skip_repositories_paths
)
end
def gitaly_strategy
# TODO: migrate to the new codebase and rewrite portions to format output in a readable way
::Backup::GitalyBackup.new($stdout,
incremental: options.incremental?,
max_parallelism: options.max_parallelism,
storage_parallelism: options.max_storage_parallelism,
server_side: false
)
Gitlab::Backup::Cli::Targets::Repositories.new(context)
end
end
end

View File

@ -5,36 +5,34 @@ module Gitlab
module Cli
module Tasks
class Task
attr_reader :options, :context
attr_writer :target
attr_reader :context
def initialize(context:)
@context = context
end
# Identifier used as parameter in the CLI to skip from executing
def self.id
raise NotImplementedError
end
def initialize(context:, options:)
@context = context
@options = options
end
# Initiate a backup
#
# @param [Pathname] backup_path a path where to store the backups
# @param [String] backup_id
def backup!(backup_path, backup_id)
def backup!(backup_path)
backup_output = backup_path.join(destination_path)
# During test, we ensure storage exists so we can run against `RAILS_ENV=test` environment
FileUtils.mkdir_p(storage_path) if context.env.test? && respond_to?(:storage_path, true)
FileUtils.mkdir_p(storage_path) if context&.env&.test? && respond_to?(:storage_path, true)
target.dump(backup_output, backup_id)
target.dump(backup_output)
end
def restore!(archive_directory, backup_id)
def restore!(archive_directory)
archived_data_location = Pathname(archive_directory).join(destination_path)
target.restore(archived_data_location, backup_id)
target.restore(archived_data_location)
end
# Key string that identifies the task
@ -70,7 +68,10 @@ module Gitlab
end
def config
context.config(id)
return context.config(id) if context
Output.warning("No context passed to derive configuration from.")
nil
end
def object_storage?

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.terraform_state_path

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.upload_path

View File

@ -49,6 +49,14 @@ module Gitlab
Shell::Command.new(cmd, *tar_args)
end
# Build a tar command suitable as the first stage of a Shell::Pipeline:
# the archive data is written to the standard stream so it can be piped
# into a compression command (see Targets::Files#dump).
#
# @param [String] target_directory directory to change into before packing
# @param [String] target path (relative to target_directory) to archive
# @param [Array] excludes patterns excluded from the archive
# @return [Gitlab::Backup::Cli::Shell::Command]
def pack_from_stdin_cmd(target_directory:, target:, excludes: [])
  pack_cmd(
    # '-' makes tar use the standard stream for the archive itself.
    # NOTE(review): for `create` that means writing the archive to stdout,
    # not reading a file list from stdin — verify against pack_cmd's
    # handling of archive_file.
    archive_file: '-',
    target_directory: target_directory,
    target: target,
    excludes: excludes)
end
# @param [Object] archive_file
# @param [Object] target_directory
# @return [Gitlab::Backup::Cli::Shell::Command]
@ -64,6 +72,11 @@ module Gitlab
Shell::Command.new(cmd, *tar_args)
end
# Build a tar command that reads the archive from stdin, so a
# decompression command can be piped into it (see Targets::Files#restore).
#
# @param [String] target_directory directory to extract into
# @return [Gitlab::Backup::Cli::Shell::Command]
def extract_from_stdin_cmd(target_directory:)
  extract_cmd(archive_file: '-', # use stdin as file source content
    target_directory: target_directory)
end
private
def build_exclude_patterns(*patterns)

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.describe Gitlab::Backup::Cli::GitlabConfig do
let(:config_fixture) { fixtures_path.join('gitlab.yml') }
let(:config_fixture) { fixtures_path.join('config/gitlab.yml') }
subject(:gitlab_config) { described_class.new(config_fixture) }

View File

@ -0,0 +1,188 @@
# frozen_string_literal: true
require 'spec_helper'
require 'active_support/testing/time_helpers'
RSpec.describe Gitlab::Backup::Cli::Targets::Files, feature_category: :backup_restore do
include ActiveSupport::Testing::TimeHelpers
let(:status_0) { instance_double(Process::Status, success?: true, exitstatus: 0) }
let(:status_1) { instance_double(Process::Status, success?: false, exitstatus: 1) }
let(:status_2) { instance_double(Process::Status, success?: false, exitstatus: 2) }
let(:pipeline_status_failed) do
Gitlab::Backup::Cli::Shell::Pipeline::Result.new(stderr: 'Cannot mkdir', status_list: [status_1, status_0])
end
let(:tmp_backup_restore_dir) { Dir.mktmpdir('files-target-restore') }
let(:destination) { 'registry.tar.gz' }
let(:context) { Gitlab::Backup::Cli::Context.build }
let!(:workdir) do
FileUtils.mkdir_p(context.backup_basedir)
Pathname(Dir.mktmpdir('backup', context.backup_basedir))
end
let(:restore_target) { File.realpath(tmp_backup_restore_dir) }
let(:backup_target) do
%w[@pages.tmp lost+found @hashed].each do |folder|
path = Pathname(tmp_backup_restore_dir).join(folder, 'something', 'else')
FileUtils.mkdir_p(path)
FileUtils.touch(path.join('artifacts.zip'))
end
File.realpath(tmp_backup_restore_dir)
end
before do
allow(FileUtils).to receive(:mv).and_return(true)
allow(File).to receive(:exist?).and_return(true)
end
after do
FileUtils.rm_rf([restore_target, backup_target, destination], secure: true)
end
describe '#dump' do
subject(:files) do
described_class.new(context, backup_target, excludes: ['@pages.tmp'])
end
it 'raises no errors' do
expect { files.dump(destination) }.not_to raise_error
end
it 'excludes tmp dirs from archive' do
expect_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline) do |pipeline|
tar_cmd = pipeline.shell_commands[0]
expect(tar_cmd.cmd_args).to include('--exclude=lost+found')
expect(tar_cmd.cmd_args).to include('--exclude=./@pages.tmp')
allow(pipeline).to receive(:run!).and_call_original
end
files.dump(destination)
end
it 'raises an error on failure' do
expect_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline::Result) do |result|
expect(result).to receive(:success?).and_return(false)
end
expect do
files.dump(destination)
end.to raise_error(/Failed to create compressed file/)
end
end
describe '#restore' do
let(:source) { File.join(restore_target, 'backup.tar.gz') }
let(:pipeline) { Gitlab::Backup::Cli::Shell::Pipeline.new(Gitlab::Backup::Cli::Shell::Command.new('echo 0')) }
subject(:files) { described_class.new(context, restore_target) }
before do
FileUtils.touch(source)
allow(Gitlab::Backup::Cli::Shell::Pipeline).to receive(:new).and_return(pipeline)
end
context 'when storage path exists' do
before do
allow(File).to receive(:exist?).with(restore_target).and_return(true)
end
it 'logs a warning about existing files' do
expect(Gitlab::Backup::Cli::Output).to receive(:warning).with(/Ignoring existing files/)
files.restore(source)
end
end
context 'when pipeline execution is successful' do
before do
allow_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline::Result) do |result|
allow(result).to receive(:success?).and_return(true)
end
end
it 'does not raise an error' do
expect { files.restore(source) }.not_to raise_error
end
end
context 'when pipeline execution fails' do
before do
allow(files).to receive(:dump).and_return(true)
allow_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline) do |pipeline|
allow(pipeline).to receive(:run!).and_return(pipeline_status_failed)
end
end
it 'raises a FileRestoreError' do
expect { files.restore(source) }.to raise_error(Gitlab::Backup::Cli::Errors::FileRestoreError)
end
end
context 'when pipeline execution has non-critical warnings' do
let(:warning_message) { 'tar: .: Cannot mkdir: No such file or directory' }
before do
allow_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline::Result) do |result|
allow(result).to receive(:success?).and_return(false)
allow(result).to receive(:stderr).and_return(warning_message)
allow(result).to receive(:status_list).and_return([status_0, status_2])
end
end
it 'does not raise an error' do
expect { files.restore(source) }.not_to raise_error
end
end
end
describe '#ignore_non_success?' do
subject(:files) do
described_class.new(context, '/var/gitlab-registry')
end
context 'if `tar` command exits with 1 exitstatus' do
it 'returns true' do
expect(
files.send(:ignore_non_success?, 1, nil)
).to be_truthy
end
it 'outputs a warning' do
expect do
files.send(:ignore_non_success?, 1, nil)
end.to output(/Ignoring tar exit status 1/).to_stdout
end
end
context 'if `tar` command exits with 2 exitstatus with non-critical warning' do
it 'returns true' do
expect(
files.send(:ignore_non_success?, 2, 'gtar: .: Cannot mkdir: No such file or directory')
).to be_truthy
end
it 'outputs a warning' do
expect do
files.send(:ignore_non_success?, 2, 'gtar: .: Cannot mkdir: No such file or directory')
end.to output(/Ignoring non-success exit status/).to_stdout
end
end
context 'if `tar` command exits with any other unlisted error' do
it 'returns false' do
expect(
files.send(:ignore_non_success?, 2, 'unlisted_error')
).to be_falsey
end
end
end
end

View File

@ -0,0 +1,199 @@
# frozen_string_literal: true
require 'spec_helper'
require 'open3'
# Specs for the gitaly-backup strategy. The target shells out to the
# `gitaly-backup` binary via Open3.popen2 and streams one JSON job per
# repository to the child process' stdin.
RSpec.describe Gitlab::Backup::Cli::Targets::GitalyBackup do
  let(:context) { Gitlab::Backup::Cli::Context.build }
  let(:gitaly_backup) { described_class.new(context) }

  describe '#start' do
    context 'when creating a backup' do
      it 'starts the gitaly-backup process with the correct arguments' do
        backup_repos_path = '/path/to/backup/repos'
        backup_id = 'abc123'
        # `-layout manifest` is always passed; `-id` carries the backup id.
        expected_args = ['create', '-path', backup_repos_path, '-layout', 'manifest', '-id', backup_id]

        expect(Open3).to receive(:popen2).with(instance_of(Hash), instance_of(String), *expected_args)

        gitaly_backup.start(:create, backup_repos_path, backup_id: backup_id)
      end
    end

    context 'when restoring a backup' do
      it 'starts the gitaly-backup process with the correct arguments' do
        backup_repos_path = '/path/to/backup/repos'
        backup_id = 'abc123'
        remove_all_repositories = %w[repo1 repo2]
        # Storage names are joined with commas into a single CLI argument.
        expected_args = ['restore', '-path', backup_repos_path, '-layout', 'manifest', '-remove-all-repositories',
          'repo1,repo2', '-id', backup_id]

        expect(Open3).to receive(:popen2).with(instance_of(Hash), instance_of(String), *expected_args)

        gitaly_backup.start(:restore, backup_repos_path, backup_id: backup_id,
          remove_all_repositories: remove_all_repositories)
      end
    end

    context 'when an invalid type is provided' do
      it 'raises an error' do
        expect do
          gitaly_backup.start(:invalid,
            '/path/to/backup/repos')
        end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /unknown backup type: invalid/)
      end
    end

    context 'when already started' do
      it 'raises an error' do
        # A non-nil @thread doubles as the "process is running" flag.
        gitaly_backup.instance_variable_set(:@thread, Thread.new { true })

        expect do
          gitaly_backup.start(:create,
            '/path/to/backup/repos')
        end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /already started/)
      end
    end
  end

  describe '#finish!' do
    context 'when not started' do
      # Calling finish! on a never-started target is a no-op, not an error.
      it 'returns without raising an error' do
        expect { gitaly_backup.finish! }.not_to raise_error
      end
    end

    context 'when started' do
      # thread.value stands in for the Process::Status of the child process.
      let(:thread) { instance_double('Thread', join: nil, value: instance_double(Process::Status, exitstatus: 0)) }

      before do
        gitaly_backup.instance_variable_set(:@thread, thread)
        gitaly_backup.instance_variable_set(:@input_stream, instance_double('InputStream', close: nil))
      end

      it 'closes the input stream and joins the thread' do
        input_stream = gitaly_backup.instance_variable_get(:@input_stream)

        # Closing stdin signals gitaly-backup that no more jobs are coming;
        # join waits for the process wrapper thread to drain.
        expect(input_stream).to receive(:close)
        expect(thread).to receive(:join)

        gitaly_backup.finish!
      end

      context 'when the process exits with a non-zero status' do
        let(:thread) { instance_double('Thread', join: nil, value: instance_double(Process::Status, exitstatus: 1)) }

        it 'raises an error' do
          expect do
            gitaly_backup.finish!
          end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /gitaly-backup exit status 1/)
        end
      end
    end
  end

  describe '#enqueue' do
    context 'when not started' do
      it 'raises an error' do
        expect do
          gitaly_backup.enqueue(double, :project)
        end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /not started/)
      end
    end

    context 'when started' do
      let(:input_stream) { instance_double('InputStream', puts: nil) }

      before do
        gitaly_backup.instance_variable_set(:@input_stream, input_stream)
        gitaly_backup.instance_variable_set(:@thread, Thread.new { true })
      end

      # Each repo type below maps the container onto one JSON job line;
      # only :project jobs set always_create: true.
      context 'with a project repository' do
        let(:container) do
          instance_double('Project', repository_storage: 'storage', disk_path: 'disk/path', full_path: 'group/project')
        end

        it 'schedules a backup job with the correct parameters' do
          expected_json = {
            storage_name: 'storage',
            relative_path: 'disk/path',
            gl_project_path: 'group/project',
            always_create: true
          }.to_json

          expect(input_stream).to receive(:puts).with(expected_json)

          gitaly_backup.enqueue(container, :project)
        end
      end

      context 'with a wiki repository' do
        # The wiki job is derived from the project's associated wiki object.
        let(:wiki) do
          instance_double('Wiki', repository_storage: 'wiki_storage', disk_path: 'wiki/disk/path',
            full_path: 'group/project.wiki')
        end

        let(:container) { instance_double('Project', wiki: wiki) }

        it 'schedules a backup job with the correct parameters' do
          expected_json = {
            storage_name: 'wiki_storage',
            relative_path: 'wiki/disk/path',
            gl_project_path: 'group/project.wiki',
            always_create: false
          }.to_json

          expect(input_stream).to receive(:puts).with(expected_json)

          gitaly_backup.enqueue(container, :wiki)
        end
      end

      context 'with a snippet repository' do
        let(:container) do
          instance_double('Snippet', repository_storage: 'storage', disk_path: 'disk/path', full_path: 'snippets/1')
        end

        it 'schedules a backup job with the correct parameters' do
          expected_json = {
            storage_name: 'storage',
            relative_path: 'disk/path',
            gl_project_path: 'snippets/1',
            always_create: false
          }.to_json

          expect(input_stream).to receive(:puts).with(expected_json)

          gitaly_backup.enqueue(container, :snippet)
        end
      end

      context 'with a design repository' do
        # Design repos read the storage from their repository but the
        # path/full_path from the owning project.
        let(:project) { instance_double('Project', disk_path: 'disk/path', full_path: 'group/project') }

        let(:container) do
          instance_double('DesignRepository', project: project,
            repository: instance_double('Repository', repository_storage: 'storage'))
        end

        it 'schedules a backup job with the correct parameters' do
          expected_json = {
            storage_name: 'storage',
            relative_path: 'disk/path',
            gl_project_path: 'group/project',
            always_create: false
          }.to_json

          expect(input_stream).to receive(:puts).with(expected_json)

          gitaly_backup.enqueue(container, :design)
        end
      end

      context 'with an invalid repository type' do
        it 'raises an error' do
          expect do
            gitaly_backup.enqueue(nil,
              :invalid)
          end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /no container for repo type/)
        end
      end
    end
  end
end

View File

@ -55,17 +55,15 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
}
end
let(:backup_options) { instance_double("::Backup::Options", remote_directory: 'fake_backup_bucket') }
before do
allow(Gitlab).to receive(:config).and_return(gitlab_config)
allow(::Google::Cloud::StorageTransfer).to receive(:storage_transfer_service).and_return(client)
allow(gitlab_config).to receive(:[]).with('fake_object').and_return(supported_config)
end
subject(:object_storage) { described_class.new("fake_object", backup_options, supported_config) }
subject(:object_storage) { described_class.new("fake_object", 'fake_backup_bucket', supported_config) }
describe "#dump" do
describe "#dump", :silence_output do
context "when job exists" do
before do
allow(client).to receive(:get_transfer_job).and_return(backup_transfer_job)
@ -79,7 +77,7 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
transfer_job: updated_spec
)
expect(client).to receive(:run_transfer_job).with({ job_name: "fake_transfer_job", project_id: "fake_project" })
object_storage.dump(nil, 12345)
object_storage.dump(12345)
end
end
@ -94,12 +92,12 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
it "creates a new job" do
expect(client).to receive(:create_transfer_job)
.with(transfer_job: new_backup_transfer_job_spec).and_return(backup_transfer_job)
object_storage.dump(nil, 12345)
object_storage.dump(12345)
end
end
end
describe "#restore" do
describe "#restore", :silence_output do
context "when job exists" do
before do
allow(client).to receive(:get_transfer_job).and_return(restore_transfer_job)
@ -113,7 +111,7 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
transfer_job: updated_spec
)
expect(client).to receive(:run_transfer_job).with({ job_name: "fake_transfer_job", project_id: "fake_project" })
object_storage.restore(nil, 12345)
object_storage.restore(12345)
end
end
@ -128,7 +126,7 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
it "creates a new job" do
expect(client).to receive(:create_transfer_job)
.with(transfer_job: new_restore_transfer_job_spec).and_return(restore_transfer_job)
object_storage.restore(nil, 12345)
object_storage.restore(12345)
end
end
end

View File

@ -0,0 +1,74 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the repositories target: delegates the actual work to its
# strategy (gitaly-backup) and enqueues one job per repository kind.
RSpec.describe Gitlab::Backup::Cli::Targets::Repositories do
  subject(:repositories) { described_class.new(context) }

  let(:context) { Gitlab::Backup::Cli::Context.build }
  let(:strategy) { repositories.strategy }

  describe '#dump' do
    it 'starts and finishes the strategy' do
      expect(strategy).to receive(:start).with(:create, '/path/to/destination')
      expect(repositories).to receive(:enqueue_consecutive)
      expect(strategy).to receive(:finish!)

      repositories.dump('/path/to/destination')
    end
  end

  describe '#restore' do
    it 'starts and finishes the strategy' do
      expect(strategy).to receive(:start).with(:restore, '/path/to/destination', remove_all_repositories: %w[default])
      expect(repositories).to receive(:enqueue_consecutive)
      expect(strategy).to receive(:finish!)
      expect(repositories).to receive(:restore_object_pools)

      repositories.restore('/path/to/destination')
    end
  end

  describe '#enqueue_consecutive' do
    it 'calls enqueue_consecutive_projects and enqueue_consecutive_snippets' do
      expect(repositories).to receive(:enqueue_consecutive_projects)
      expect(repositories).to receive(:enqueue_consecutive_snippets)

      repositories.send(:enqueue_consecutive)
    end
  end

  describe '#enqueue_project' do
    let(:project) { instance_double('Project', design_management_repository: nil) }

    it 'enqueues project and wiki' do
      expect(strategy).to receive(:enqueue).with(project, Gitlab::Backup::Cli::RepoType::PROJECT)
      expect(strategy).to receive(:enqueue).with(project, Gitlab::Backup::Cli::RepoType::WIKI)

      repositories.send(:enqueue_project, project)
    end

    context 'when project has design management repository' do
      let(:design_repo) { instance_double('DesignRepository') }
      let(:project) { instance_double('Project', design_management_repository: design_repo) }

      it 'enqueues project, wiki, and design' do
        expect(strategy).to receive(:enqueue).with(project, Gitlab::Backup::Cli::RepoType::PROJECT)
        expect(strategy).to receive(:enqueue).with(project, Gitlab::Backup::Cli::RepoType::WIKI)
        expect(strategy).to receive(:enqueue).with(design_repo, Gitlab::Backup::Cli::RepoType::DESIGN)

        repositories.send(:enqueue_project, project)
      end
    end
  end

  describe '#enqueue_snippet' do
    let(:snippet) { instance_double('Snippet') }

    it 'enqueues the snippet' do
      expect(strategy).to receive(:enqueue).with(snippet, Gitlab::Backup::Cli::RepoType::SNIPPET)

      repositories.send(:enqueue_snippet, snippet)
    end
  end
end

View File

@ -1,12 +1,11 @@
# frozen_string_literal: true
RSpec.describe Gitlab::Backup::Cli::Tasks::Task do
let(:options) { instance_double("::Backup::Option", backup_id: "abc123") }
let(:context) { build_fake_context }
let(:tmpdir) { Pathname.new(Dir.mktmpdir('task', temp_path)) }
let(:metadata) { build(:backup_metadata) }
subject(:task) { described_class.new(options: options, context: context) }
subject(:task) { described_class.new(context: context) }
after do
FileUtils.rmtree(tmpdir)
@ -37,9 +36,9 @@ RSpec.describe Gitlab::Backup::Cli::Tasks::Task do
end
end
describe '#target' do
describe '#local' do
it 'raises an error' do
expect { task.send(:target) }.to raise_error(NotImplementedError)
expect { task.send(:local) }.to raise_error(NotImplementedError)
end
end
end
@ -49,7 +48,7 @@ RSpec.describe Gitlab::Backup::Cli::Tasks::Task do
expect(task).to receive(:destination_path).and_return(tmpdir.join('test_task'))
expect(task).to receive_message_chain(:target, :dump)
task.backup!(tmpdir, metadata.backup_id)
task.backup!(tmpdir)
end
end
@ -59,7 +58,7 @@ RSpec.describe Gitlab::Backup::Cli::Tasks::Task do
expect(task).to receive(:destination_path).and_return(tmpdir.join('test_task'))
expect(task).to receive_message_chain(:target, :restore)
task.restore!(archive_directory, options.backup_id)
task.restore!(archive_directory)
end
end
end

View File

@ -20,9 +20,13 @@ RSpec.describe Gitlab::Backup::Cli::Utils::Tar do
target_basepath = tempdir
target = tempdir.join('*')
result = nil
expect do
tar.pack_cmd(archive_file: archive_file, target_directory: target_basepath, target: target)
result = tar.pack_cmd(archive_file: archive_file, target_directory: target_basepath, target: target)
end.not_to raise_exception
expect(result).to be_a(Gitlab::Backup::Cli::Shell::Command)
end
end
@ -83,4 +87,51 @@ RSpec.describe Gitlab::Backup::Cli::Utils::Tar do
end
end
end
# The streaming variant must be a thin wrapper: forward every option to
# #pack_cmd while forcing the archive to be written to STDOUT ('-').
describe '#pack_from_stdin_cmd' do
  # Fix: the description previously read `archive_files:`, but the keyword
  # actually asserted below is `archive_file:`.
  it 'delegates parameters to pack_cmd passing archive_file: as -' do
    tar_tempdir do |tempdir|
      target_basepath = tempdir
      target = tempdir.join('*')
      excludes = ['lost+found']

      expect(tar).to receive(:pack_cmd).with(
        archive_file: '-',
        target_directory: target_basepath,
        target: target,
        excludes: excludes)

      tar.pack_from_stdin_cmd(target_directory: target_basepath, target: target, excludes: excludes)
    end
  end
end
describe '#extract_cmd' do
  # Fix: third-person 'instantiates' to match sibling example descriptions.
  it 'instantiates a Shell::Command with default required params' do
    tar_tempdir do |tempdir|
      archive_file = tempdir.join('testarchive.tar')
      target_basepath = tempdir
      result = nil

      expect do
        result = tar.extract_cmd(archive_file: archive_file, target_directory: target_basepath)
      end.not_to raise_exception

      # Building the command returns a Shell::Command; nothing is executed here.
      expect(result).to be_a(Gitlab::Backup::Cli::Shell::Command)
    end
  end
end
# Fixes: the describe string was missing the '#' instance-method prefix used
# by every sibling block, and the description said `archive_files:` while the
# keyword asserted is `archive_file:`.
describe '#extract_from_stdin_cmd' do
  it 'delegates parameters to extract_cmd passing archive_file: as -' do
    tar_tempdir do |tempdir|
      target_basepath = tempdir

      # '-' makes tar read the archive from STDIN instead of a file.
      expect(tar).to receive(:extract_cmd).with(archive_file: '-', target_directory: target_basepath)

      tar.extract_from_stdin_cmd(target_directory: target_basepath)
    end
  end
end
end

View File

@ -7,6 +7,7 @@ require 'thor'
require 'gitlab/rspec/next_instance_of'
# Force the test environment unless the caller already set one.
ENV["RAILS_ENV"] ||= "test"

# NOTE(review): presumably used as the fake GitLab base path so specs never
# touch a real installation — confirm against the support code that reads it.
GITLAB_PATH = File.expand_path(File.join(__dir__, '/fixtures/'))

# Load spec support code
Dir['spec/support/**/*.rb'].each { |f| load f }

View File

@ -24,7 +24,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
describe '#backup_basedir' do
context 'with a relative path configured in gitlab.yml' do
it 'returns a full path based on gitlab basepath' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.backup_basedir).to eq(fake_gitlab_basepath.join('tmp/tests/backups'))
end
@ -58,7 +58,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.ci_builds_path).to eq(Pathname('/tmp/gitlab/full/builds'))
end
@ -84,7 +84,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.ci_job_artifacts_path).to eq(Pathname('/tmp/gitlab/full/artifacts'))
end
@ -110,7 +110,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.ci_secure_files_path).to eq(Pathname('/tmp/gitlab/full/ci_secure_files'))
end
@ -136,7 +136,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.ci_lfs_path).to eq(Pathname('/tmp/gitlab/full/lfs-objects'))
end
@ -162,7 +162,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.packages_path).to eq(Pathname('/tmp/gitlab/full/packages'))
end
@ -188,7 +188,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.pages_path).to eq(Pathname('/tmp/gitlab/full/pages'))
end
@ -214,7 +214,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.registry_path).to eq(Pathname('/tmp/gitlab/full/registry'))
end
@ -240,7 +240,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.terraform_state_path).to eq(Pathname('/tmp/gitlab/full/terraform_state'))
end
@ -266,7 +266,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.upload_path).to eq(Pathname('/tmp/gitlab/full/public/uploads'))
end
@ -292,7 +292,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.send(:gitlab_shared_path)).to eq(Pathname('/tmp/gitlab/full/shared'))
end