diff --git a/app/assets/javascripts/import_entities/import_groups/components/import_target_cell.vue b/app/assets/javascripts/import_entities/import_groups/components/import_target_cell.vue
index 7359d4f239e..daced740c94 100644
--- a/app/assets/javascripts/import_entities/import_groups/components/import_target_cell.vue
+++ b/app/assets/javascripts/import_entities/import_groups/components/import_target_cell.vue
@@ -3,14 +3,16 @@ import {
GlDropdownDivider,
GlDropdownItem,
GlDropdownSectionHeader,
- GlLink,
GlFormInput,
} from '@gitlab/ui';
-import { joinPaths } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
import ImportGroupDropdown from '../../components/group_dropdown.vue';
-import { STATUSES } from '../../constants';
-import { isInvalid, getInvalidNameValidationMessage, isNameValid } from '../utils';
+import {
+ isInvalid,
+ getInvalidNameValidationMessage,
+ isNameValid,
+ isAvailableForImport,
+} from '../utils';
export default {
components: {
@@ -18,7 +20,6 @@ export default {
GlDropdownDivider,
GlDropdownItem,
GlDropdownSectionHeader,
- GlLink,
GlFormInput,
},
props: {
@@ -61,20 +62,8 @@ export default {
return isNameValid(this.group, this.groupPathRegex);
},
- isAlreadyImported() {
- return this.group.progress.status !== STATUSES.NONE;
- },
-
- isFinished() {
- return this.group.progress.status === STATUSES.FINISHED;
- },
-
- fullPath() {
- return `${this.importTarget.target_namespace}/${this.importTarget.new_name}`;
- },
-
- absolutePath() {
- return joinPaths(gon.relative_url_root || '/', this.fullPath);
+ isAvailableForImport() {
+ return isAvailableForImport(this.group);
},
},
@@ -85,25 +74,11 @@ export default {
-
- {{ fullPath }}
-
-
-
+
/
@@ -141,11 +116,11 @@ export default {
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js b/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js
index 57188441158..c08cf909a00 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js
@@ -5,10 +5,13 @@ import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
import { s__ } from '~/locale';
import { STATUSES } from '../../constants';
import { i18n, NEW_NAME_FIELD } from '../constants';
+import { isAvailableForImport } from '../utils';
import bulkImportSourceGroupItemFragment from './fragments/bulk_import_source_group_item.fragment.graphql';
+import bulkImportSourceGroupProgressFragment from './fragments/bulk_import_source_group_progress.fragment.graphql';
import addValidationErrorMutation from './mutations/add_validation_error.mutation.graphql';
import removeValidationErrorMutation from './mutations/remove_validation_error.mutation.graphql';
import setImportProgressMutation from './mutations/set_import_progress.mutation.graphql';
+import setImportTargetMutation from './mutations/set_import_target.mutation.graphql';
import updateImportStatusMutation from './mutations/update_import_status.mutation.graphql';
import availableNamespacesQuery from './queries/available_namespaces.query.graphql';
import bulkImportSourceGroupQuery from './queries/bulk_import_source_group.query.graphql';
@@ -34,6 +37,7 @@ function makeGroup(data) {
};
const NESTED_OBJECT_FIELDS = {
import_target: clientTypenames.BulkImportTarget,
+ last_import_target: clientTypenames.BulkImportTarget,
progress: clientTypenames.BulkImportProgress,
};
@@ -55,6 +59,7 @@ async function checkImportTargetIsValid({ client, newName, targetNamespace, sour
data: { existingGroup, existingProject },
} = await client.query({
query: groupAndProjectQuery,
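+ // Bypass the Apollo cache so the target existence check always hits the server.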
+ fetchPolicy: 'no-cache',
variables: {
fullPath: `${targetNamespace}/${newName}`,
},
@@ -82,6 +87,7 @@ async function checkImportTargetIsValid({ client, newName, targetNamespace, sour
}
const localProgressId = (id) => `not-started-${id}`;
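+// Suggests the next target name for a group that was already imported (for example "group" -> "group-1").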
+const nextName = (name) => `${name}-1`;
export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGroupsManager }) {
const groupsManager = new GroupsManager({
@@ -140,17 +146,28 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
const { jobId, importState: cachedImportState } =
groupsManager.getImportStateFromStorageByGroupId(group.id) ?? {};
+ const status = cachedImportState?.status ?? STATUSES.NONE;
+
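+ // If the previous import finished, suggest a fresh target name derived from the last
+ // import target so a re-import does not collide with the group that was already created.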
+ const importTarget =
+ status === STATUSES.FINISHED && cachedImportState.importTarget
+ ? {
+ target_namespace: cachedImportState.importTarget.target_namespace,
+ new_name: nextName(cachedImportState.importTarget.new_name),
+ }
+ : cachedImportState?.importTarget ?? {
+ new_name: group.full_path,
+ target_namespace: availableNamespaces[0]?.full_path ?? '',
+ };
+
return makeGroup({
...group,
validation_errors: [],
progress: {
id: jobId ?? localProgressId(group.id),
- status: cachedImportState?.status ?? STATUSES.NONE,
- },
- import_target: cachedImportState?.importTarget ?? {
- new_name: group.full_path,
- target_namespace: availableNamespaces[0]?.full_path ?? '',
+ status,
},
+ import_target: importTarget,
+ last_import_target: cachedImportState?.importTarget ?? null,
});
}),
pageInfo: {
@@ -161,7 +178,7 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
setTimeout(() => {
response.nodes.forEach((group) => {
- if (group.progress.status === STATUSES.NONE) {
+ if (isAvailableForImport(group)) {
checkImportTargetIsValid({
client,
newName: group.import_target.new_name,
@@ -193,32 +210,18 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
targetNamespace,
newName,
});
+
return makeGroup({
id: sourceGroupId,
import_target: {
target_namespace: targetNamespace,
new_name: newName,
+ id: sourceGroupId,
},
});
},
- setTargetNamespace: (_, { targetNamespace, sourceGroupId }) =>
- makeGroup({
- id: sourceGroupId,
- import_target: {
- target_namespace: targetNamespace,
- },
- }),
-
- setNewName: (_, { newName, sourceGroupId }) =>
- makeGroup({
- id: sourceGroupId,
- import_target: {
- new_name: newName,
- },
- }),
-
- async setImportProgress(_, { sourceGroupId, status, jobId }) {
+ async setImportProgress(_, { sourceGroupId, status, jobId, importTarget }) {
if (jobId) {
groupsManager.updateImportProgress(jobId, status);
}
@@ -229,16 +232,46 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
id: jobId ?? localProgressId(sourceGroupId),
status,
},
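+ // Remember the target used for this import so the UI can link to the imported group.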
+ last_import_target: {
+ __typename: clientTypenames.BulkImportTarget,
+ ...importTarget,
+ },
});
},
- async updateImportStatus(_, { id, status }) {
- groupsManager.updateImportProgress(id, status);
+ async updateImportStatus(_, { id, status: newStatus }, { client, getCacheKey }) {
+ groupsManager.updateImportProgress(id, newStatus);
+
+ const progressItem = client.readFragment({
+ fragment: bulkImportSourceGroupProgressFragment,
+ fragmentName: 'BulkImportSourceGroupProgress',
+ id: getCacheKey({
+ __typename: clientTypenames.BulkImportProgress,
+ id,
+ }),
+ });
+
+ const isInProgress = Boolean(progressItem);
+ const { status: currentStatus } = progressItem ?? {};
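+ // When a tracked import job transitions to finished, advance the suggested target name
+ // for every group in that job so each of them can be re-imported right away.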
+ if (newStatus === STATUSES.FINISHED && isInProgress && currentStatus !== newStatus) {
+ const groups = groupsManager.getImportedGroupsByJobId(id);
+
+ groups.forEach(async ({ id: groupId, importTarget }) => {
+ client.mutate({
+ mutation: setImportTargetMutation,
+ variables: {
+ sourceGroupId: groupId,
+ targetNamespace: importTarget.target_namespace,
+ newName: nextName(importTarget.new_name),
+ },
+ });
+ });
+ }
return {
__typename: clientTypenames.BulkImportProgress,
id,
- status,
+ status: newStatus,
};
},
@@ -327,10 +360,10 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
return { status: STATUSES.NONE };
})
.then((newStatus) =>
- sourceGroupIds.forEach((sourceGroupId) =>
+ sourceGroupIds.forEach((sourceGroupId, idx) =>
client.mutate({
mutation: setImportProgressMutation,
- variables: { sourceGroupId, ...newStatus },
+ variables: { sourceGroupId, ...newStatus, importTarget: groups[idx].import_target },
}),
),
)
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql
index 47675cd1bd0..089340b3c48 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql
@@ -12,6 +12,10 @@ fragment BulkImportSourceGroupItem on ClientBulkImportSourceGroup {
target_namespace
new_name
}
+ last_import_target {
+ target_namespace
+ new_name
+ }
validation_errors {
field
message
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/set_import_progress.mutation.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/set_import_progress.mutation.graphql
index 2ec1269932a..43301554de3 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/set_import_progress.mutation.graphql
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/set_import_progress.mutation.graphql
@@ -1,9 +1,23 @@
-mutation setImportProgress($status: String!, $sourceGroupId: String!, $jobId: String) {
- setImportProgress(status: $status, sourceGroupId: $sourceGroupId, jobId: $jobId) @client {
+mutation setImportProgress(
+ $status: String!
+ $sourceGroupId: String!
+ $jobId: String
+ $importTarget: ImportTargetInput!
+) {
+ setImportProgress(
+ status: $status
+ sourceGroupId: $sourceGroupId
+ jobId: $jobId
+ importTarget: $importTarget
+ ) @client {
id
progress {
id
status
}
+ last_import_target {
+ target_namespace
+ new_name
+ }
}
}
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js b/app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js
index 97dbdbf518a..7caa37d9ad4 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js
@@ -35,15 +35,18 @@ export class SourceGroupsManager {
}
createImportState(importId, jobConfig) {
- this.importStates[this.getStorageKey(importId)] = {
+ this.importStates[importId] = {
status: jobConfig.status,
- groups: jobConfig.groups.map((g) => ({ importTarget: g.import_target, id: g.id })),
+ groups: jobConfig.groups.map((g) => ({
+ importTarget: { ...g.import_target },
+ id: g.id,
+ })),
};
this.saveImportStatesToStorage();
}
updateImportProgress(importId, status) {
- const currentState = this.importStates[this.getStorageKey(importId)];
+ const currentState = this.importStates[importId];
if (!currentState) {
return;
}
@@ -52,12 +55,15 @@ export class SourceGroupsManager {
this.saveImportStatesToStorage();
}
+ getImportedGroupsByJobId(jobId) {
+ return this.importStates[jobId]?.groups ?? [];
+ }
+
getImportStateFromStorageByGroupId(groupId) {
- const PREFIX = this.getStorageKey('');
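+ // Check the most recently stored import states first so the group resolves to its latest import.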
const [jobId, importState] =
- Object.entries(this.importStates).find(
- ([key, state]) => key.startsWith(PREFIX) && state.groups.some((g) => g.id === groupId),
- ) ?? [];
+ Object.entries(this.importStates)
+ .reverse()
+ .find(([, state]) => state.groups.some((g) => g.id === groupId)) ?? [];
if (!jobId) {
return null;
@@ -67,10 +73,6 @@ export class SourceGroupsManager {
return { jobId, importState: { ...group, status: importState.status } };
}
- getStorageKey(importId) {
- return `${this.sourceUrl}|${importId}`;
- }
-
saveImportStatesToStorage = debounce(() => {
try {
// storage might be changed in other tab so fetch first
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql
index c830aaa75e6..6ef4bbafec0 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql
@@ -30,6 +30,7 @@ type ClientBulkImportSourceGroup {
full_name: String!
progress: ClientBulkImportProgress!
import_target: ClientBulkImportTarget!
+ last_import_target: ClientBulkImportTarget
validation_errors: [ClientBulkImportValidationError!]!
}
@@ -50,11 +51,21 @@ extend type Query {
availableNamespaces: [ClientBulkImportAvailableNamespace!]!
}
+input ImportTargetInput {
+ target_namespace: String!
+ new_name: String!
+}
+
extend type Mutation {
setNewName(newName: String, sourceGroupId: ID!): ClientBulkImportSourceGroup!
setTargetNamespace(targetNamespace: String, sourceGroupId: ID!): ClientBulkImportSourceGroup!
importGroups(sourceGroupIds: [ID!]!): [ClientBulkImportSourceGroup!]!
- setImportProgress(id: ID, status: String!): ClientBulkImportSourceGroup!
+ setImportProgress(
+ id: ID
+ status: String!
+ jobId: String
+ importTarget: ImportTargetInput!
+ ): ClientBulkImportSourceGroup!
updateImportProgress(id: ID, status: String!): ClientBulkImportProgress
addValidationError(
sourceGroupId: ID!
diff --git a/app/assets/javascripts/import_entities/import_groups/utils.js b/app/assets/javascripts/import_entities/import_groups/utils.js
index b451008b6f9..a1baeaf39dd 100644
--- a/app/assets/javascripts/import_entities/import_groups/utils.js
+++ b/app/assets/javascripts/import_entities/import_groups/utils.js
@@ -1,3 +1,4 @@
+import { STATUSES } from '../constants';
import { NEW_NAME_FIELD } from './constants';
export function isNameValid(group, validationRegex) {
@@ -11,3 +12,11 @@ export function getInvalidNameValidationMessage(group) {
export function isInvalid(group, validationRegex) {
return Boolean(!isNameValid(group, validationRegex) || getInvalidNameValidationMessage(group));
}
+
+export function isFinished(group) {
+ return group.progress.status === STATUSES.FINISHED;
+}
+
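+// A group is available for (re-)import when it was never imported or its previous import has finished.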
+export function isAvailableForImport(group) {
+ return [STATUSES.NONE, STATUSES.FINISHED].some((status) => group.progress.status === status);
+}
diff --git a/app/assets/javascripts/right_sidebar.js b/app/assets/javascripts/right_sidebar.js
index c3b5e1ac266..23254fcc2eb 100644
--- a/app/assets/javascripts/right_sidebar.js
+++ b/app/assets/javascripts/right_sidebar.js
@@ -102,7 +102,7 @@ Sidebar.prototype.toggleTodo = function (e) {
})
.catch(() =>
createFlash({
- message: sprintf(__('There was an error %{message} todo.'), {
+ message: sprintf(__('There was an error %{message} to-do item.'), {
message:
ajaxType === 'post' ? s__('RightSidebar|adding a') : s__('RightSidebar|deleting the'),
}),
diff --git a/app/graphql/resolvers/concerns/resolves_pipelines.rb b/app/graphql/resolvers/concerns/resolves_pipelines.rb
index 77f2105db7c..7fb0852b11e 100644
--- a/app/graphql/resolvers/concerns/resolves_pipelines.rb
+++ b/app/graphql/resolvers/concerns/resolves_pipelines.rb
@@ -17,6 +17,11 @@ module ResolvesPipelines
GraphQL::Types::String,
required: false,
description: "Filter pipelines by the sha of the commit they are run for."
+
+ argument :source,
+ GraphQL::Types::String,
+ required: false,
+ description: "Filter pipelines by their source. Will be ignored if `dast_view_scans` feature flag is disabled."
end
class_methods do
@@ -30,6 +35,8 @@ module ResolvesPipelines
end
def resolve_pipelines(project, params = {})
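+ # The :source filter is only honored while the dast_view_scans feature flag is enabled.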
+ params.delete(:source) unless Feature.enabled?(:dast_view_scans, project, default_enabled: :yaml)
+
Ci::PipelinesFinder.new(project, context[:current_user], params).execute
end
end
diff --git a/app/graphql/types/customer_relations/contact_type.rb b/app/graphql/types/customer_relations/contact_type.rb
new file mode 100644
index 00000000000..35b5bf45698
--- /dev/null
+++ b/app/graphql/types/customer_relations/contact_type.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Types
+ module CustomerRelations
+ class ContactType < BaseObject
+ graphql_name 'CustomerRelationsContact'
+
+ authorize :read_contact
+
+ field :id,
+ GraphQL::Types::ID,
+ null: false,
+ description: 'Internal ID of the contact.'
+
+ field :organization, Types::CustomerRelations::OrganizationType,
+ null: true,
+ description: "Organization of the contact."
+
+ field :first_name,
+ GraphQL::Types::String,
+ null: false,
+ description: 'First name of the contact.'
+
+ field :last_name,
+ GraphQL::Types::String,
+ null: false,
+ description: 'Last name of the contact.'
+
+ field :phone,
+ GraphQL::Types::String,
+ null: true,
+ description: 'Phone number of the contact.'
+
+ field :email,
+ GraphQL::Types::String,
+ null: true,
+ description: 'Email address of the contact.'
+
+ field :description,
+ GraphQL::Types::String,
+ null: true,
+ description: 'Description or notes for the contact.'
+
+ field :created_at,
+ Types::TimeType,
+ null: false,
+ description: 'Timestamp the contact was created.'
+
+ field :updated_at,
+ Types::TimeType,
+ null: false,
+ description: 'Timestamp the contact was last updated.'
+ end
+ end
+end
diff --git a/app/graphql/types/customer_relations/organization_type.rb b/app/graphql/types/customer_relations/organization_type.rb
index b5db821bc9c..b629c4c0566 100644
--- a/app/graphql/types/customer_relations/organization_type.rb
+++ b/app/graphql/types/customer_relations/organization_type.rb
@@ -29,12 +29,12 @@ module Types
field :created_at,
Types::TimeType,
- null: true,
+ null: false,
description: 'Timestamp the organization was created.'
field :updated_at,
Types::TimeType,
- null: true,
+ null: false,
description: 'Timestamp the organization was last updated.'
end
end
diff --git a/app/graphql/types/group_type.rb b/app/graphql/types/group_type.rb
index 0c1497d0a31..80b87044298 100644
--- a/app/graphql/types/group_type.rb
+++ b/app/graphql/types/group_type.rb
@@ -199,6 +199,10 @@ module Types
null: true,
description: "Find organizations of this group."
+ field :contacts, Types::CustomerRelations::ContactType.connection_type,
+ null: true,
+ description: "Find contacts of this group."
+
def avatar_url
object.avatar_url(only_path: false)
end
diff --git a/app/models/group.rb b/app/models/group.rb
index 1e83f271052..c6ab8ac7a64 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -760,6 +760,10 @@ class Group < Namespace
::CustomerRelations::Organization.where(group_id: self.id)
end
+ def contacts
+ ::CustomerRelations::Contact.where(group_id: self.id)
+ end
+
private
def max_member_access(user_ids)
diff --git a/app/models/namespaces/traversal/linear_scopes.rb b/app/models/namespaces/traversal/linear_scopes.rb
index 90fae8ef35d..2da0e48c2da 100644
--- a/app/models/namespaces/traversal/linear_scopes.rb
+++ b/app/models/namespaces/traversal/linear_scopes.rb
@@ -15,6 +15,28 @@ module Namespaces
select('namespaces.traversal_ids[array_length(namespaces.traversal_ids, 1)] AS id')
end
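+ # Ancestors are looked up by unnesting the traversal_ids of the current scope.
+ # Falls back to the recursive implementation when the ancestor-scopes feature flag is disabled.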
+ def self_and_ancestors(include_self: true, hierarchy_order: nil)
+ return super unless use_traversal_ids_for_ancestor_scopes?
+
+ records = unscoped
+ .without_sti_condition
+ .where(id: without_sti_condition.select('unnest(traversal_ids)'))
+ .order_by_depth(hierarchy_order)
+ .normal_select
+
+ if include_self
+ records
+ else
+ records.where.not(id: all.as_ids)
+ end
+ end
+
+ def self_and_ancestor_ids(include_self: true)
+ return super unless use_traversal_ids_for_ancestor_scopes?
+
+ self_and_ancestors(include_self: include_self).as_ids
+ end
+
def self_and_descendants(include_self: true)
return super unless use_traversal_ids?
@@ -22,11 +44,7 @@ module Namespaces
distinct = records.select('DISTINCT on(namespaces.id) namespaces.*')
- # Produce a query of the form: SELECT * FROM namespaces;
- #
- # When we have queries that break this SELECT * format we can run in to errors.
- # For example `SELECT DISTINCT on(...)` will fail when we chain a `.count` c
- unscoped.without_sti_condition.from(distinct, :namespaces)
+ distinct.normal_select
end
def self_and_descendant_ids(include_self: true)
@@ -42,12 +60,35 @@ module Namespaces
unscope(where: :type)
end
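+ # Orders the scope by hierarchy depth, derived from the length of traversal_ids.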
+ def order_by_depth(hierarchy_order)
+ return all unless hierarchy_order
+
+ depth_order = hierarchy_order == :asc ? :desc : :asc
+
+ all
+ .select(Arel.star, 'array_length(traversal_ids, 1) as depth')
+ .order(depth: depth_order, id: :asc)
+ end
+
+ # Produce a query of the form: SELECT * FROM namespaces;
+ #
+ # When we have queries that break this SELECT * format we can run in to errors.
+ # For example `SELECT DISTINCT on(...)` will fail when we chain a `.count` c
+ def normal_select
+ unscoped.without_sti_condition.from(all, :namespaces)
+ end
+
private
def use_traversal_ids?
Feature.enabled?(:use_traversal_ids, default_enabled: :yaml)
end
+ def use_traversal_ids_for_ancestor_scopes?
+ Feature.enabled?(:use_traversal_ids_for_ancestor_scopes, default_enabled: :yaml) &&
+ use_traversal_ids?
+ end
+
def self_and_descendants_with_duplicates(include_self: true)
base_ids = select(:id)
diff --git a/app/models/namespaces/traversal/recursive_scopes.rb b/app/models/namespaces/traversal/recursive_scopes.rb
index be49d5d9d55..6659cefe095 100644
--- a/app/models/namespaces/traversal/recursive_scopes.rb
+++ b/app/models/namespaces/traversal/recursive_scopes.rb
@@ -10,6 +10,22 @@ module Namespaces
select('id')
end
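+ # Recursive (CTE-based) ancestor lookup via Gitlab::ObjectHierarchy.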
+ def self_and_ancestors(include_self: true, hierarchy_order: nil)
+ records = Gitlab::ObjectHierarchy.new(all).base_and_ancestors(hierarchy_order: hierarchy_order)
+
+ if include_self
+ records
+ else
+ records.where.not(id: all.as_ids)
+ end
+ end
+ alias_method :recursive_self_and_ancestors, :self_and_ancestors
+
+ def self_and_ancestor_ids(include_self: true)
+ self_and_ancestors(include_self: include_self).as_ids
+ end
+ alias_method :recursive_self_and_ancestor_ids, :self_and_ancestor_ids
+
def descendant_ids
recursive_descendants.as_ids
end
diff --git a/app/policies/customer_relations/contact_policy.rb b/app/policies/customer_relations/contact_policy.rb
new file mode 100644
index 00000000000..8367649b50c
--- /dev/null
+++ b/app/policies/customer_relations/contact_policy.rb
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+module CustomerRelations
+ class ContactPolicy < BasePolicy
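+ # Delegate all permission checks to the contact's group policy.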
+ delegate { @subject.group }
+ end
+end
diff --git a/app/policies/group_policy.rb b/app/policies/group_policy.rb
index 92a8cab01d9..ffe908593ae 100644
--- a/app/policies/group_policy.rb
+++ b/app/policies/group_policy.rb
@@ -113,6 +113,7 @@ class GroupPolicy < BasePolicy
enable :read_custom_emoji
enable :read_counts
enable :read_organization
+ enable :read_contact
end
rule { ~public_group & ~has_access }.prevent :read_counts
diff --git a/config/feature_flags/development/dast_view_scans.yml b/config/feature_flags/development/dast_view_scans.yml
new file mode 100644
index 00000000000..39c14097b60
--- /dev/null
+++ b/config/feature_flags/development/dast_view_scans.yml
@@ -0,0 +1,8 @@
+---
+name: dast_view_scans
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/69571
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/340388
+milestone: '14.3'
+type: development
+group: group::dynamic analysis
+default_enabled: false
diff --git a/config/feature_flags/development/use_traversal_ids_for_ancestor_scopes.yml b/config/feature_flags/development/use_traversal_ids_for_ancestor_scopes.yml
new file mode 100644
index 00000000000..7f398fc5f0b
--- /dev/null
+++ b/config/feature_flags/development/use_traversal_ids_for_ancestor_scopes.yml
@@ -0,0 +1,8 @@
+---
+name: use_traversal_ids_for_ancestor_scopes
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67652
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/340159
+milestone: '14.3'
+type: development
+group: group::access
+default_enabled: false
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index af9a70725c1..fa19a502c8f 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -5318,6 +5318,29 @@ The edge type for [`CustomEmoji`](#customemoji).
| `cursor` | [`String!`](#string) | A cursor for use in pagination. |
| `node` | [`CustomEmoji`](#customemoji) | The item at the end of the edge. |
+#### `CustomerRelationsContactConnection`
+
+The connection type for [`CustomerRelationsContact`](#customerrelationscontact).
+
+##### Fields
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| `edges` | [`[CustomerRelationsContactEdge]`](#customerrelationscontactedge) | A list of edges. |
+| `nodes` | [`[CustomerRelationsContact]`](#customerrelationscontact) | A list of nodes. |
+| `pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
+
+#### `CustomerRelationsContactEdge`
+
+The edge type for [`CustomerRelationsContact`](#customerrelationscontact).
+
+##### Fields
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| `cursor` | [`String!`](#string) | A cursor for use in pagination. |
+| `node` | [`CustomerRelationsContact`](#customerrelationscontact) | The item at the end of the edge. |
+
#### `CustomerRelationsOrganizationConnection`
The connection type for [`CustomerRelationsOrganization`](#customerrelationsorganization).
@@ -8369,6 +8392,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| ---- | ---- | ----------- |
| `ref` | [`String`](#string) | Filter pipelines by the ref they are run for. |
| `sha` | [`String`](#string) | Filter pipelines by the sha of the commit they are run for. |
+| `source` | [`String`](#string) | Filter pipelines by their source. Will be ignored if the `dast_view_scans` feature flag is disabled. |
| `status` | [`PipelineStatusEnum`](#pipelinestatusenum) | Filter pipelines by their status. |
### `ComplianceFramework`
@@ -8544,18 +8568,34 @@ A custom emoji uploaded by user.
| `name` | [`String!`](#string) | Name of the emoji. |
| `url` | [`String!`](#string) | Link to file of the emoji. |
+### `CustomerRelationsContact`
+
+#### Fields
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| `createdAt` | [`Time!`](#time) | Timestamp the contact was created. |
+| `description` | [`String`](#string) | Description or notes for the contact. |
+| `email` | [`String`](#string) | Email address of the contact. |
+| `firstName` | [`String!`](#string) | First name of the contact. |
+| `id` | [`ID!`](#id) | Internal ID of the contact. |
+| `lastName` | [`String!`](#string) | Last name of the contact. |
+| `organization` | [`CustomerRelationsOrganization`](#customerrelationsorganization) | Organization of the contact. |
+| `phone` | [`String`](#string) | Phone number of the contact. |
+| `updatedAt` | [`Time!`](#time) | Timestamp the contact was last updated. |
+
### `CustomerRelationsOrganization`
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
-| `createdAt` | [`Time`](#time) | Timestamp the organization was created. |
+| `createdAt` | [`Time!`](#time) | Timestamp the organization was created. |
| `defaultRate` | [`Float`](#float) | Standard billing rate for the organization. |
| `description` | [`String`](#string) | Description or notes for the organization. |
| `id` | [`ID!`](#id) | Internal ID of the organization. |
| `name` | [`String`](#string) | Name of the organization. |
-| `updatedAt` | [`Time`](#time) | Timestamp the organization was last updated. |
+| `updatedAt` | [`Time!`](#time) | Timestamp the organization was last updated. |
### `DastProfile`
@@ -9782,6 +9822,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| `autoDevopsEnabled` | [`Boolean`](#boolean) | Indicates whether Auto DevOps is enabled for all projects within this group. |
| `avatarUrl` | [`String`](#string) | Avatar URL of the group. |
| `billableMembersCount` | [`Int`](#int) | Number of billable users in the group. |
+| `contacts` | [`CustomerRelationsContactConnection`](#customerrelationscontactconnection) | Find contacts of this group. (see [Connections](#connections)) |
| `containerRepositoriesCount` | [`Int!`](#int) | Number of container repositories in the group. |
| `containsLockedProjects` | [`Boolean!`](#boolean) | Includes at least one project where the repository size exceeds the limit. |
| `customEmoji` | [`CustomEmojiConnection`](#customemojiconnection) | Custom emoji within this namespace. Available only when feature flag `custom_emoji` is enabled. This flag is disabled by default, because the feature is experimental and is subject to change without notice. (see [Connections](#connections)) |
@@ -10990,6 +11031,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| ---- | ---- | ----------- |
| `ref` | [`String`](#string) | Filter pipelines by the ref they are run for. |
| `sha` | [`String`](#string) | Filter pipelines by the sha of the commit they are run for. |
+| `source` | [`String`](#string) | Filter pipelines by their source. Will be ignored if the `dast_view_scans` feature flag is disabled. |
| `status` | [`PipelineStatusEnum`](#pipelinestatusenum) | Filter pipelines by their status. |
##### `MergeRequest.reference`
@@ -12777,6 +12819,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| ---- | ---- | ----------- |
| `ref` | [`String`](#string) | Filter pipelines by the ref they are run for. |
| `sha` | [`String`](#string) | Filter pipelines by the sha of the commit they are run for. |
+| `source` | [`String`](#string) | Filter pipelines by their source. Will be ignored if the `dast_view_scans` feature flag is disabled. |
| `status` | [`PipelineStatusEnum`](#pipelinestatusenum) | Filter pipelines by their status. |
##### `Project.projectMembers`
diff --git a/doc/development/testing_guide/frontend_testing.md b/doc/development/testing_guide/frontend_testing.md
index c70d796dfec..89719ad5ce0 100644
--- a/doc/development/testing_guide/frontend_testing.md
+++ b/doc/development/testing_guide/frontend_testing.md
@@ -1034,6 +1034,16 @@ If you introduce new helpers, place them in that directory.
We have a helper available to make testing actions easier, as per [official documentation](https://vuex.vuejs.org/guide/testing.html):
```javascript
+// Prefer this form: a single object argument makes the parameters obvious when reading the test
+await testAction({
+ action: actions.actionName,
+ payload: { deleteListId: 1 },
+ state: { lists: [1, 2, 3] },
+ expectedMutations: [{ type: types.MUTATION }],
+ expectedActions: [],
+});
+
+// Old signature, avoid this in new tests
testAction(
actions.actionName, // action
{ }, // params to be passed to action
@@ -1050,8 +1060,6 @@ testAction(
);
```
-Check an example in [`spec/frontend/ide/stores/actions_spec.js`](https://gitlab.com/gitlab-org/gitlab/-/blob/fdc7197609dfa7caeb1d962042a26248e49f27da/spec/frontend/ide/stores/actions_spec.js#L392).
-
### Wait until Axios requests finish
diff --git a/doc/topics/git/git_rebase.md b/doc/topics/git/git_rebase.md
index 0e288f1445e..b09f9fa0f6c 100644
--- a/doc/topics/git/git_rebase.md
+++ b/doc/topics/git/git_rebase.md
@@ -228,8 +228,13 @@ git push --force-with-lease origin my-feature-branch
```
If the branch you want to force-push is [protected](../../user/project/protected_branches.md),
-you can't force-push to it unless you unprotect it first. Then you can
-force-push and re-protect it.
+you can't force push to it unless you either:
+
+- Unprotect it.
+- [Allow force push](../../user/project/protected_branches.md#allow-force-push-on-a-protected-branch)
+ to it.
+
+Then you can force push and protect it again.
## Merge conflicts
diff --git a/doc/update/index.md b/doc/update/index.md
index 8851d51d4d1..a933b347449 100644
--- a/doc/update/index.md
+++ b/doc/update/index.md
@@ -372,9 +372,8 @@ for how to proceed.
- Due to an issue where `BatchedBackgroundMigrationWorkers` were
[not working](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/2785#note_614738345)
for self-managed instances, a [fix was created](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65106)
- and a [14.0.Z](#1400) version was released. If you haven't updated to 14.0.Z, you need
- to update to at least 14.1.0 that contains the same fix before you update to
- to 14.2.
+ and a [14.0.Z](#1400) version was released. If you haven't updated to 14.0.5, you need
+ to update to at least 14.1.0 that contains the same fix before you update to 14.2.
- GitLab 14.2.0 contains background migrations to [address Primary Key overflow risk for tables with an integer PK](https://gitlab.com/groups/gitlab-org/-/epics/4785) for the tables listed below:
- [`ci_build_needs`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65216)
@@ -402,7 +401,7 @@ for how to proceed.
- Due to an issue where `BatchedBackgroundMigrationWorkers` were
[not working](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/2785#note_614738345)
for self-managed instances, a [fix was created](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65106)
- and a [14.0.Z](#1400) version was released. If you haven't updated to 14.0.Z, you need
+ and a [14.0.Z](#1400) version was released. If you haven't updated to 14.0.5, you need
to update to at least 14.1.0 that contains the same fix before you update to
a later version.
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index a477f40697c..2a996dc77c2 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -603,17 +603,17 @@ module Gitlab
# new_column - The name of the new column.
# trigger_name - The name of the trigger to use (optional).
def install_rename_triggers(table, old, new, trigger_name: nil)
- Gitlab::Database::UnidirectionalCopyTrigger.on_table(table).create(old, new, trigger_name: trigger_name)
+ Gitlab::Database::UnidirectionalCopyTrigger.on_table(table, connection: connection).create(old, new, trigger_name: trigger_name)
end
# Removes the triggers used for renaming a column concurrently.
def remove_rename_triggers(table, trigger)
- Gitlab::Database::UnidirectionalCopyTrigger.on_table(table).drop(trigger)
+ Gitlab::Database::UnidirectionalCopyTrigger.on_table(table, connection: connection).drop(trigger)
end
# Returns the (base) name to use for triggers when renaming columns.
def rename_trigger_name(table, old, new)
- Gitlab::Database::UnidirectionalCopyTrigger.on_table(table).name(old, new)
+ Gitlab::Database::UnidirectionalCopyTrigger.on_table(table, connection: connection).name(old, new)
end
# Changes the type of a column concurrently.
diff --git a/lib/gitlab/database/with_lock_retries.rb b/lib/gitlab/database/with_lock_retries.rb
index e55390e679a..f9d467ae5cc 100644
--- a/lib/gitlab/database/with_lock_retries.rb
+++ b/lib/gitlab/database/with_lock_retries.rb
@@ -61,7 +61,7 @@ module Gitlab
[10.seconds, 10.minutes]
].freeze
- def initialize(logger: NULL_LOGGER, allow_savepoints: true, timing_configuration: DEFAULT_TIMING_CONFIGURATION, klass: nil, env: ENV)
+ def initialize(logger: NULL_LOGGER, allow_savepoints: true, timing_configuration: DEFAULT_TIMING_CONFIGURATION, klass: nil, env: ENV, connection: ActiveRecord::Base.connection)
@logger = logger
@klass = klass
@allow_savepoints = allow_savepoints
@@ -69,6 +69,7 @@ module Gitlab
@env = env
@current_iteration = 1
@log_params = { method: 'with_lock_retries', class: klass.to_s }
+ @connection = connection
end
# Executes a block of code, retrying it whenever a database lock can't be acquired in time
@@ -96,7 +97,7 @@ module Gitlab
run_block_with_lock_timeout
rescue ActiveRecord::LockWaitTimeout
if retry_with_lock_timeout?
- disable_idle_in_transaction_timeout if ActiveRecord::Base.connection.transaction_open?
+ disable_idle_in_transaction_timeout if connection.transaction_open?
wait_until_next_retry
reset_db_settings
@@ -116,16 +117,16 @@ module Gitlab
private
- attr_reader :logger, :env, :block, :current_iteration, :log_params, :timing_configuration
+ attr_reader :logger, :env, :block, :current_iteration, :log_params, :timing_configuration, :connection
def run_block
block.call
end
def run_block_with_lock_timeout
- raise "WithLockRetries should not run inside already open transaction" if ActiveRecord::Base.connection.transaction_open? && @allow_savepoints.blank?
+ raise "WithLockRetries should not run inside already open transaction" if connection.transaction_open? && @allow_savepoints.blank?
- ActiveRecord::Base.transaction(requires_new: true) do # rubocop:disable Performance/ActiveRecordSubtransactions
+ connection.transaction(requires_new: true) do # rubocop:disable Performance/ActiveRecordSubtransactions
execute("SET LOCAL lock_timeout TO '#{current_lock_timeout_in_ms}ms'")
log(message: 'Lock timeout is set', current_iteration: current_iteration, lock_timeout_in_ms: current_lock_timeout_in_ms)
@@ -152,7 +153,7 @@ module Gitlab
log(message: "Couldn't acquire lock to perform the migration", current_iteration: current_iteration)
log(message: "Executing the migration without lock timeout", current_iteration: current_iteration)
- disable_lock_timeout if ActiveRecord::Base.connection.transaction_open?
+ disable_lock_timeout if connection.transaction_open?
run_block
@@ -168,7 +169,7 @@ module Gitlab
end
def execute(statement)
- ActiveRecord::Base.connection.execute(statement)
+ connection.execute(statement)
end
def retry_count
diff --git a/lib/tasks/gitlab/gitaly.rake b/lib/tasks/gitlab/gitaly.rake
index 6675439e430..ef58c9339f1 100644
--- a/lib/tasks/gitlab/gitaly.rake
+++ b/lib/tasks/gitlab/gitaly.rake
@@ -15,8 +15,7 @@ namespace :gitlab do
gdk_gitaly_dir = ENV.fetch('GDK_GITALY', Rails.root.join('../gitaly'))
# Our test setup expects a git repo, so clone rather than copy
- version = Gitlab::GitalyClient.expected_server_version
- checkout_or_clone_version(version: version, repo: gdk_gitaly_dir, target_dir: args.dir, clone_opts: %w[--depth 1])
+ clone_repo(gdk_gitaly_dir, args.dir, clone_opts: %w[--depth 1]) unless Dir.exist?(args.dir)
# We assume the GDK gitaly already compiled binaries
build_dir = File.join(gdk_gitaly_dir, '_build')
@@ -31,7 +30,7 @@ namespace :gitlab do
FileUtils.cp_r(ruby_bundle_file, args.dir)
gitaly_binary = File.join(build_dir, 'bin', 'gitaly')
- warn_gitaly_out_of_date!(gitaly_binary, version)
+ warn_gitaly_out_of_date!(gitaly_binary, Gitlab::GitalyClient.expected_server_version)
rescue Errno::ENOENT => e
puts "Could not copy files, did you run `gdk update`? Error: #{e.message}"
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index ca93f702588..a43c54298db 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -5830,6 +5830,9 @@ msgstr ""
msgid "Bulk update"
msgstr ""
+msgid "BulkImports|Re-import creates a new group. It does not sync with the existing group."
+msgstr ""
+
msgid "BulkImport|Existing groups"
msgstr ""
@@ -5851,6 +5854,9 @@ msgstr ""
msgid "BulkImport|Importing the group failed"
msgstr ""
+msgid "BulkImport|Last imported to %{link}"
+msgstr ""
+
msgid "BulkImport|Name already exists."
msgstr ""
@@ -27568,6 +27574,9 @@ msgstr ""
msgid "Re-authentication required"
msgstr ""
+msgid "Re-import"
+msgstr ""
+
msgid "Re-request review"
msgstr ""
@@ -33935,7 +33944,7 @@ msgstr ""
msgid "There was a problem updating the keep latest artifacts setting."
msgstr ""
-msgid "There was an error %{message} todo."
+msgid "There was an error %{message} to-do item."
msgstr ""
msgid "There was an error adding a To Do."
diff --git a/qa/qa/page/group/bulk_import.rb b/qa/qa/page/group/bulk_import.rb
index 9ba80abf21c..b9497aeb6e5 100644
--- a/qa/qa/page/group/bulk_import.rb
+++ b/qa/qa/page/group/bulk_import.rb
@@ -7,7 +7,6 @@ module QA
view "app/assets/javascripts/import_entities/import_groups/components/import_table.vue" do
element :import_table
element :import_item
- element :import_group_button
element :import_status_indicator
end
@@ -19,6 +18,10 @@ module QA
element :target_namespace_selector_dropdown
end
+ view "app/assets/javascripts/import_entities/import_groups/components/import_actions_cell.vue" do
+ element :import_group_button
+ end
+
# Import source group in to target group
#
# @param [String] source_group_name
diff --git a/rubocop/cop/migration/prevent_index_creation.rb b/rubocop/cop/migration/prevent_index_creation.rb
index aeeec36ecf0..c383466f73b 100644
--- a/rubocop/cop/migration/prevent_index_creation.rb
+++ b/rubocop/cop/migration/prevent_index_creation.rb
@@ -8,7 +8,7 @@ module RuboCop
class PreventIndexCreation < RuboCop::Cop::Cop
include MigrationHelpers
- FORBIDDEN_TABLES = %i[ci_builds taggings ci_builds_metadata events].freeze
+ FORBIDDEN_TABLES = %i[ci_builds].freeze
MSG = "Adding new index to #{FORBIDDEN_TABLES.join(", ")} is forbidden, see https://gitlab.com/gitlab-org/gitlab/-/issues/332886"
diff --git a/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js b/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js
new file mode 100644
index 00000000000..60f0780fdb3
--- /dev/null
+++ b/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js
@@ -0,0 +1,90 @@
+import { GlButton, GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { STATUSES } from '~/import_entities/constants';
+import ImportActionsCell from '~/import_entities/import_groups/components/import_actions_cell.vue';
+import { generateFakeEntry } from '../graphql/fixtures';
+
+describe('import actions cell', () => {
+ let wrapper;
+
+ const createComponent = (props) => {
+ wrapper = shallowMount(ImportActionsCell, {
+ propsData: {
+ groupPathRegex: /^[a-zA-Z]+$/,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when import status is NONE', () => {
+ beforeEach(() => {
+ const group = generateFakeEntry({ id: 1, status: STATUSES.NONE });
+ createComponent({ group });
+ });
+
+ it('renders import button', () => {
+ const button = wrapper.findComponent(GlButton);
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Import');
+ });
+
+ it('does not render icon with a hint', () => {
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(false);
+ });
+ });
+
+ describe('when import status is FINISHED', () => {
+ beforeEach(() => {
+ const group = generateFakeEntry({ id: 1, status: STATUSES.FINISHED });
+ createComponent({ group });
+ });
+
+ it('renders re-import button', () => {
+ const button = wrapper.findComponent(GlButton);
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Re-import');
+ });
+
+ it('renders icon with a hint', () => {
+ const icon = wrapper.findComponent(GlIcon);
+ expect(icon.exists()).toBe(true);
+ expect(icon.attributes().title).toBe(
+ 'Re-import creates a new group. It does not sync with the existing group.',
+ );
+ });
+ });
+
+ it('does not render import button when group import is in progress', () => {
+ const group = generateFakeEntry({ id: 1, status: STATUSES.STARTED });
+ createComponent({ group });
+
+ const button = wrapper.findComponent(GlButton);
+ expect(button.exists()).toBe(false);
+ });
+
+ it('renders import button as disabled when there are validation errors', () => {
+ const group = generateFakeEntry({
+ id: 1,
+ status: STATUSES.NONE,
+ validation_errors: [{ field: 'new_name', message: 'something ' }],
+ });
+ createComponent({ group });
+
+ const button = wrapper.findComponent(GlButton);
+ expect(button.props().disabled).toBe(true);
+ });
+
+ it('emits import-group event when import button is clicked', () => {
+ const group = generateFakeEntry({ id: 1, status: STATUSES.NONE });
+ createComponent({ group });
+
+ const button = wrapper.findComponent(GlButton);
+ button.vm.$emit('click');
+
+ expect(wrapper.emitted('import-group')).toHaveLength(1);
+ });
+});
diff --git a/spec/frontend/import_entities/import_groups/components/import_source_cell_spec.js b/spec/frontend/import_entities/import_groups/components/import_source_cell_spec.js
new file mode 100644
index 00000000000..2a56efd1cbb
--- /dev/null
+++ b/spec/frontend/import_entities/import_groups/components/import_source_cell_spec.js
@@ -0,0 +1,59 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { STATUSES } from '~/import_entities/constants';
+import ImportSourceCell from '~/import_entities/import_groups/components/import_source_cell.vue';
+import { generateFakeEntry } from '../graphql/fixtures';
+
+describe('import source cell', () => {
+ let wrapper;
+ let group;
+
+ const createComponent = (props) => {
+ wrapper = shallowMount(ImportSourceCell, {
+ propsData: {
+ ...props,
+ },
+ stubs: { GlSprintf },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when group status is NONE', () => {
+ beforeEach(() => {
+ group = generateFakeEntry({ id: 1, status: STATUSES.NONE });
+ createComponent({ group });
+ });
+
+ it('renders link to a group', () => {
+ const link = wrapper.findComponent(GlLink);
+ expect(link.attributes().href).toBe(group.web_url);
+ expect(link.text()).toContain(group.full_path);
+ });
+
+ it('does not render last imported line', () => {
+ expect(wrapper.text()).not.toContain('Last imported to');
+ });
+ });
+
+ describe('when group status is FINISHED', () => {
+ beforeEach(() => {
+ group = generateFakeEntry({ id: 1, status: STATUSES.FINISHED });
+ createComponent({ group });
+ });
+
+ it('renders link to a group', () => {
+ const link = wrapper.findComponent(GlLink);
+ expect(link.attributes().href).toBe(group.web_url);
+ expect(link.text()).toContain(group.full_path);
+ });
+
+ it('renders last imported line', () => {
+ expect(wrapper.text()).toMatchInterpolatedText(
+ 'fake_group_1 Last imported to root/last-group1',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index bbd8463e685..f43e545e049 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -15,6 +15,7 @@ import stubChildren from 'helpers/stub_children';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { STATUSES } from '~/import_entities/constants';
+import ImportActionsCell from '~/import_entities/import_groups/components/import_actions_cell.vue';
import ImportTable from '~/import_entities/import_groups/components/import_table.vue';
import ImportTargetCell from '~/import_entities/import_groups/components/import_target_cell.vue';
import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
@@ -163,11 +164,8 @@ describe('import table', () => {
it('invokes importGroups mutation when row button is clicked', async () => {
jest.spyOn(apolloProvider.defaultClient, 'mutate');
- const triggerImportButton = wrapper
- .findAllComponents(GlButton)
- .wrappers.find((w) => w.text() === 'Import');
- triggerImportButton.vm.$emit('click');
+ wrapper.findComponent(ImportActionsCell).vm.$emit('import-group');
await waitForPromises();
expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
mutation: importGroupsMutation,
@@ -329,7 +327,7 @@ describe('import table', () => {
});
it('does not allow selecting already started groups', async () => {
- const NEW_GROUPS = [generateFakeEntry({ id: 1, status: STATUSES.FINISHED })];
+ const NEW_GROUPS = [generateFakeEntry({ id: 1, status: STATUSES.STARTED })];
createComponent({
bulkImportSourceGroups: () => ({
diff --git a/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js
index 8231297e594..be83a61841f 100644
--- a/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js
@@ -1,14 +1,10 @@
-import { GlButton, GlDropdownItem, GlLink, GlFormInput } from '@gitlab/ui';
+import { GlDropdownItem, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-import VueApollo from 'vue-apollo';
import ImportGroupDropdown from '~/import_entities/components/group_dropdown.vue';
import { STATUSES } from '~/import_entities/constants';
import ImportTargetCell from '~/import_entities/import_groups/components/import_target_cell.vue';
import { availableNamespacesFixture } from '../graphql/fixtures';
-Vue.use(VueApollo);
-
const getFakeGroup = (status) => ({
web_url: 'https://fake.host/',
full_path: 'fake_group_1',
@@ -26,9 +22,6 @@ describe('import target cell', () => {
let wrapper;
let group;
- const findByText = (cmp, text) => {
- return wrapper.findAll(cmp).wrappers.find((node) => node.text().indexOf(text) === 0);
- };
const findNameInput = () => wrapper.find(GlFormInput);
const findNamespaceDropdown = () => wrapper.find(ImportGroupDropdown);
@@ -117,10 +110,6 @@ describe('import target cell', () => {
createComponent({ group });
});
- it('does not render Import button', () => {
- expect(findByText(GlButton, 'Import')).toBe(undefined);
- });
-
it('renders namespace dropdown as disabled', () => {
expect(findNamespaceDropdown().attributes('disabled')).toBe('true');
});
@@ -132,17 +121,8 @@ describe('import target cell', () => {
createComponent({ group });
});
- it('does not render Import button', () => {
- expect(findByText(GlButton, 'Import')).toBe(undefined);
- });
-
- it('does not render namespace dropdown', () => {
- expect(findNamespaceDropdown().exists()).toBe(false);
- });
-
- it('renders target as link', () => {
- const TARGET_LINK = `${group.import_target.target_namespace}/${group.import_target.new_name}`;
- expect(findByText(GlLink, TARGET_LINK).exists()).toBe(true);
+ it('renders namespace dropdown as enabled', () => {
+ expect(findNamespaceDropdown().attributes('disabled')).toBe(undefined);
});
});
@@ -179,9 +159,6 @@ describe('import target cell', () => {
},
});
- jest.runOnlyPendingTimers();
- await nextTick();
-
expect(wrapper.text()).toContain(FAKE_ERROR_MESSAGE);
});
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
index ec50dfd037f..e1d65095888 100644
--- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
@@ -259,6 +259,10 @@ describe('Bulk import resolvers', () => {
target_namespace: 'root',
new_name: 'group1',
},
+ last_import_target: {
+ target_namespace: 'root',
+ new_name: 'group1',
+ },
validation_errors: [],
},
],
@@ -414,19 +418,32 @@ describe('Bulk import resolvers', () => {
});
});
- it('setImportProgress updates group progress', async () => {
+ it('setImportProgress updates group progress and sets import target', async () => {
const NEW_STATUS = 'dummy';
const FAKE_JOB_ID = 5;
+ const IMPORT_TARGET = {
+ __typename: 'ClientBulkImportTarget',
+ new_name: 'fake_name',
+ target_namespace: 'fake_target',
+ };
const {
data: {
- setImportProgress: { progress },
+ setImportProgress: { progress, last_import_target: lastImportTarget },
},
} = await client.mutate({
mutation: setImportProgressMutation,
- variables: { sourceGroupId: GROUP_ID, status: NEW_STATUS, jobId: FAKE_JOB_ID },
+ variables: {
+ sourceGroupId: GROUP_ID,
+ status: NEW_STATUS,
+ jobId: FAKE_JOB_ID,
+ importTarget: IMPORT_TARGET,
+ },
});
- expect(progress).toMatchObject({
+ expect(lastImportTarget).toStrictEqual(IMPORT_TARGET);
+
+ expect(progress).toStrictEqual({
+ __typename: clientTypenames.BulkImportProgress,
id: FAKE_JOB_ID,
status: NEW_STATUS,
});
@@ -442,7 +459,8 @@ describe('Bulk import resolvers', () => {
variables: { id: FAKE_JOB_ID, status: NEW_STATUS },
});
- expect(statusInResponse).toMatchObject({
+ expect(statusInResponse).toStrictEqual({
+ __typename: clientTypenames.BulkImportProgress,
id: FAKE_JOB_ID,
status: NEW_STATUS,
});
@@ -460,7 +478,13 @@ describe('Bulk import resolvers', () => {
variables: { sourceGroupId: GROUP_ID, field: FAKE_FIELD, message: FAKE_MESSAGE },
});
- expect(validationErrors).toMatchObject([{ field: FAKE_FIELD, message: FAKE_MESSAGE }]);
+ expect(validationErrors).toStrictEqual([
+ {
+ __typename: clientTypenames.BulkImportValidationError,
+ field: FAKE_FIELD,
+ message: FAKE_MESSAGE,
+ },
+ ]);
});
it('removeValidationError removes error from group', async () => {
@@ -481,7 +505,7 @@ describe('Bulk import resolvers', () => {
variables: { sourceGroupId: GROUP_ID, field: FAKE_FIELD },
});
- expect(validationErrors).toMatchObject([]);
+ expect(validationErrors).toStrictEqual([]);
});
});
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/fixtures.js b/spec/frontend/import_entities/import_groups/graphql/fixtures.js
index 6f66066b312..d1bd52693b6 100644
--- a/spec/frontend/import_entities/import_groups/graphql/fixtures.js
+++ b/spec/frontend/import_entities/import_groups/graphql/fixtures.js
@@ -9,6 +9,10 @@ export const generateFakeEntry = ({ id, status, ...rest }) => ({
target_namespace: 'root',
new_name: `group${id}`,
},
+ last_import_target: {
+ target_namespace: 'root',
+ new_name: `last-group${id}`,
+ },
id,
progress: {
id: `test-${id}`,
diff --git a/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js b/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
index bae715edac0..f06babcb149 100644
--- a/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
@@ -20,7 +20,7 @@ describe('SourceGroupsManager', () => {
describe('storage management', () => {
const IMPORT_ID = 1;
- const IMPORT_TARGET = { destination_name: 'demo', destination_namespace: 'foo' };
+ const IMPORT_TARGET = { new_name: 'demo', target_namespace: 'foo' };
const STATUS = 'FAKE_STATUS';
const FAKE_GROUP = { id: 1, import_target: IMPORT_TARGET, status: STATUS };
diff --git a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
index 6f6855c8f84..865e892b12d 100644
--- a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
+++ b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe ResolvesPipelines do
project.add_developer(current_user)
end
- it { is_expected.to have_graphql_arguments(:status, :ref, :sha) }
+ it { is_expected.to have_graphql_arguments(:status, :ref, :sha, :source) }
it 'finds all pipelines' do
expect(resolve_pipelines).to contain_exactly(pipeline, failed_pipeline, ref_pipeline, sha_pipeline)
@@ -45,6 +45,30 @@ RSpec.describe ResolvesPipelines do
expect(resolve_pipelines(sha: 'deadbeef')).to contain_exactly(sha_pipeline)
end
+ context 'filtering by source' do
+ let_it_be(:source_pipeline) { create(:ci_pipeline, project: project, source: 'web') }
+
+ context 'when `dast_view_scans` feature flag is disabled' do
+ before do
+ stub_feature_flags(dast_view_scans: false)
+ end
+
+ it 'does not filter by source' do
+ expect(resolve_pipelines(source: 'web')).to contain_exactly(pipeline, failed_pipeline, ref_pipeline, sha_pipeline, source_pipeline)
+ end
+ end
+
+ context 'when `dast_view_scans` feature flag is enabled' do
+ it 'does filter by source' do
+ expect(resolve_pipelines(source: 'web')).to contain_exactly(source_pipeline)
+ end
+
+ it 'returns all the pipelines' do
+ expect(resolve_pipelines).to contain_exactly(pipeline, failed_pipeline, ref_pipeline, sha_pipeline, source_pipeline)
+ end
+ end
+ end
+
it 'does not return any pipelines if the user does not have access' do
expect(resolve_pipelines({}, {})).to be_empty
end
diff --git a/spec/graphql/types/customer_relations/contact_type_spec.rb b/spec/graphql/types/customer_relations/contact_type_spec.rb
new file mode 100644
index 00000000000..a51ee705fb0
--- /dev/null
+++ b/spec/graphql/types/customer_relations/contact_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CustomerRelationsContact'] do
+ let(:fields) { %i[id organization first_name last_name phone email description created_at updated_at] }
+
+ it { expect(described_class.graphql_name).to eq('CustomerRelationsContact') }
+ it { expect(described_class).to have_graphql_fields(fields) }
+ it { expect(described_class).to require_graphql_authorizations(:read_contact) }
+end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index 342d5932ba3..35fda69ffe3 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe GitlabSchema.types['Group'] do
dependency_proxy_blobs dependency_proxy_image_count
dependency_proxy_blob_count dependency_proxy_total_size
dependency_proxy_image_prefix shared_runners_setting
- timelogs organizations
+ timelogs organizations contacts
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/lib/gitlab/action_cable/request_store_callbacks_spec.rb b/spec/lib/gitlab/action_cable/request_store_callbacks_spec.rb
index e25b51c6513..3b73252709c 100644
--- a/spec/lib/gitlab/action_cable/request_store_callbacks_spec.rb
+++ b/spec/lib/gitlab/action_cable/request_store_callbacks_spec.rb
@@ -1,5 +1,7 @@
# frozen_string_literal: true
+require 'spec_helper'
+
RSpec.describe Gitlab::ActionCable::RequestStoreCallbacks do
describe '.wrapper' do
it 'enables RequestStore in the inner block' do
diff --git a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb
index 89ad0017753..ebbbafb855f 100644
--- a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb
@@ -1,5 +1,7 @@
# frozen_string_literal: true
+require 'spec_helper'
+
RSpec.describe Gitlab::Database::LoadBalancing::ActionCableCallbacks, :request_store do
describe '.wrapper' do
it 'uses primary and then releases the connection and clears the session' do
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 92485f0be7c..006f8a39f9c 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -798,13 +798,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
# This spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
context 'when the statement_timeout is already disabled', :delete do
before do
- ActiveRecord::Base.connection.execute('SET statement_timeout TO 0')
+ ActiveRecord::Migration.connection.execute('SET statement_timeout TO 0')
end
after do
- # Use ActiveRecord::Base.connection instead of model.execute
+ # Use ActiveRecord::Migration.connection instead of model.execute
# so that this call is not counted below
- ActiveRecord::Base.connection.execute('RESET statement_timeout')
+ ActiveRecord::Migration.connection.execute('RESET statement_timeout')
end
it 'yields control without disabling the timeout or resetting' do
@@ -954,10 +954,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:trigger_name) { model.rename_trigger_name(:users, :id, :new) }
let(:user) { create(:user) }
let(:copy_trigger) { double('copy trigger') }
+ let(:connection) { ActiveRecord::Migration.connection }
before do
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
- .with(:users).and_return(copy_trigger)
+ .with(:users, connection: connection).and_return(copy_trigger)
end
it 'copies the value to the new column using the type_cast_function', :aggregate_failures do
@@ -1300,11 +1301,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
describe '#install_rename_triggers' do
+ let(:connection) { ActiveRecord::Migration.connection }
+
it 'installs the triggers' do
copy_trigger = double('copy trigger')
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
- .with(:users).and_return(copy_trigger)
+ .with(:users, connection: connection).and_return(copy_trigger)
expect(copy_trigger).to receive(:create).with(:old, :new, trigger_name: 'foo')
@@ -1313,11 +1316,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
describe '#remove_rename_triggers' do
+ let(:connection) { ActiveRecord::Migration.connection }
+
it 'removes the function and trigger' do
copy_trigger = double('copy trigger')
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
- .with('bar').and_return(copy_trigger)
+ .with('bar', connection: connection).and_return(copy_trigger)
expect(copy_trigger).to receive(:drop).with('foo')
@@ -2194,7 +2199,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#index_exists_by_name?' do
it 'returns true if an index exists' do
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE INDEX test_index_for_index_exists ON projects (path);'
)
@@ -2209,7 +2214,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when an index with a function exists' do
before do
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE INDEX test_index ON projects (LOWER(path));'
)
end
@@ -2222,15 +2227,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when an index exists for a table with the same name in another schema' do
before do
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE SCHEMA new_test_schema'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE INDEX test_index_on_name ON new_test_schema.projects (LOWER(name));'
)
end
@@ -2463,19 +2468,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#check_constraint_exists?' do
before do
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE SCHEMA new_test_schema'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
)
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 0d8f42aa7a2..0b960830d89 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
let(:logger) { Gitlab::Database::WithLockRetries::NULL_LOGGER }
let(:subject) { described_class.new(env: env, logger: logger, allow_savepoints: allow_savepoints, timing_configuration: timing_configuration) }
let(:allow_savepoints) { true }
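+  # Connection handle used throughout these examples when stubbing and executing SQL.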
+ let(:connection) { ActiveRecord::Base.connection }
let(:timing_configuration) do
[
@@ -67,7 +68,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
WHERE t.relkind = 'r' AND l.mode = 'ExclusiveLock' AND t.relname = '#{Project.table_name}'
"""
- expect(ActiveRecord::Base.connection.execute(check_exclusive_lock_query).to_a).to be_present
+ expect(connection.execute(check_exclusive_lock_query).to_a).to be_present
end
end
@@ -96,8 +97,8 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_fiber.resume
end
- ActiveRecord::Base.transaction do
- ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ connection.transaction do
+ connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
lock_acquired = true
end
end
@@ -115,7 +116,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'setting the idle transaction timeout' do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the idle transaction timeout' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(connection).to receive(:transaction_open?).and_return(false)
allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
allow(subject).to receive(:run_block_with_lock_timeout).once
@@ -127,7 +128,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
it 'disables the idle transaction timeout so the code can sleep and retry' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+ allow(connection).to receive(:transaction_open?).and_return(true)
n = 0
allow(subject).to receive(:run_block_with_lock_timeout).twice do
@@ -152,7 +153,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the lock_timeout' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(connection).to receive(:transaction_open?).and_return(false)
allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).not_to receive(:disable_lock_timeout)
@@ -163,7 +164,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
it 'disables the lock_timeout' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+ allow(connection).to receive(:transaction_open?).and_return(true)
allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).to receive(:disable_lock_timeout)
@@ -198,8 +199,8 @@ RSpec.describe Gitlab::Database::WithLockRetries do
subject.run(raise_on_exhaustion: true) do
lock_attempts += 1
- ActiveRecord::Base.transaction do
- ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ connection.transaction do
+ connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
lock_acquired = true
end
end
@@ -213,11 +214,11 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when statement timeout is reached' do
it 'raises QueryCanceled error' do
lock_acquired = false
- ActiveRecord::Base.connection.execute("SET LOCAL statement_timeout='100ms'")
+ connection.execute("SET LOCAL statement_timeout='100ms'")
expect do
subject.run do
- ActiveRecord::Base.connection.execute("SELECT 1 FROM pg_sleep(0.11)") # 110ms
+ connection.execute("SELECT 1 FROM pg_sleep(0.11)") # 110ms
lock_acquired = true
end
end.to raise_error(ActiveRecord::QueryCanceled)
@@ -230,11 +231,11 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'restore local database variables' do
it do
- expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW lock_timeout").to_a }
+ expect { subject.run {} }.not_to change { connection.execute("SHOW lock_timeout").to_a }
end
it do
- expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW idle_in_transaction_session_timeout").to_a }
+ expect { subject.run {} }.not_to change { connection.execute("SHOW idle_in_transaction_session_timeout").to_a }
end
end
@@ -242,10 +243,10 @@ RSpec.describe Gitlab::Database::WithLockRetries do
let(:timing_configuration) { [[0.015.seconds, 0.025.seconds], [0.015.seconds, 0.025.seconds]] } # 15ms, 25ms
it 'executes `SET LOCAL lock_timeout` using the configured timeout value in milliseconds' do
- expect(ActiveRecord::Base.connection).to receive(:execute).with("RESET idle_in_transaction_session_timeout; RESET lock_timeout").and_call_original
- expect(ActiveRecord::Base.connection).to receive(:execute).with("SAVEPOINT active_record_1", "TRANSACTION").and_call_original
- expect(ActiveRecord::Base.connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original
- expect(ActiveRecord::Base.connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1", "TRANSACTION").and_call_original
+ expect(connection).to receive(:execute).with("RESET idle_in_transaction_session_timeout; RESET lock_timeout").and_call_original
+ expect(connection).to receive(:execute).with("SAVEPOINT active_record_1", "TRANSACTION").and_call_original
+ expect(connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original
+ expect(connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1", "TRANSACTION").and_call_original
subject.run { }
end
@@ -262,13 +263,13 @@ RSpec.describe Gitlab::Database::WithLockRetries do
let(:allow_savepoints) { false }
it 'prevents running inside already open transaction' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+ allow(connection).to receive(:transaction_open?).and_return(true)
expect { subject.run { } }.to raise_error(/should not run inside already open transaction/)
end
it 'does not raise the error if not inside open transaction' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(connection).to receive(:transaction_open?).and_return(false)
expect { subject.run { } }.not_to raise_error
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index a839d400b9d..8e6be32ea3c 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -2644,6 +2644,16 @@ RSpec.describe Group do
end
end
+  describe '#contacts' do
+ it 'returns contacts belonging to the group' do
+ contact1 = create(:contact, group: group)
+ create(:contact)
+ contact3 = create(:contact, group: group)
+
+ expect(group.contacts).to contain_exactly(contact1, contact3)
+ end
+ end
+
describe '#to_ability_name' do
it 'returns group' do
group = build(:group)
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 4675f08dd34..95144fae9ce 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_allowed(:read_organization)
+ expect_allowed(:read_contact)
expect_allowed(:read_counts)
expect_allowed(*read_group_permissions)
expect_disallowed(:upload_file)
@@ -33,6 +34,7 @@ RSpec.describe GroupPolicy do
it { expect_disallowed(:read_group) }
it { expect_disallowed(:read_organization) }
+ it { expect_disallowed(:read_contact) }
it { expect_disallowed(:read_counts) }
it { expect_disallowed(*read_group_permissions) }
end
@@ -47,6 +49,7 @@ RSpec.describe GroupPolicy do
it { expect_disallowed(:read_group) }
it { expect_disallowed(:read_organization) }
+ it { expect_disallowed(:read_contact) }
it { expect_disallowed(:read_counts) }
it { expect_disallowed(*read_group_permissions) }
end
@@ -903,6 +906,7 @@ RSpec.describe GroupPolicy do
it { is_expected.to be_allowed(:read_package) }
it { is_expected.to be_allowed(:read_group) }
it { is_expected.to be_allowed(:read_organization) }
+ it { is_expected.to be_allowed(:read_contact) }
it { is_expected.to be_disallowed(:create_package) }
end
@@ -913,6 +917,7 @@ RSpec.describe GroupPolicy do
it { is_expected.to be_allowed(:read_package) }
it { is_expected.to be_allowed(:read_group) }
it { is_expected.to be_allowed(:read_organization) }
+ it { is_expected.to be_allowed(:read_contact) }
it { is_expected.to be_disallowed(:destroy_package) }
end
end
diff --git a/spec/rubocop/cop/migration/prevent_index_creation_spec.rb b/spec/rubocop/cop/migration/prevent_index_creation_spec.rb
index b5bb770553a..ed7c8974d8d 100644
--- a/spec/rubocop/cop/migration/prevent_index_creation_spec.rb
+++ b/spec/rubocop/cop/migration/prevent_index_creation_spec.rb
@@ -6,7 +6,7 @@ require_relative '../../../../rubocop/cop/migration/prevent_index_creation'
RSpec.describe RuboCop::Cop::Migration::PreventIndexCreation do
subject(:cop) { described_class.new }
- let(:forbidden_tables) { %w(ci_builds taggings ci_builds_metadata events) }
+ let(:forbidden_tables) { %w(ci_builds) }
let(:forbidden_tables_list) { forbidden_tables.join(', ') }
context 'when in migration' do
diff --git a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
index 4d328c03641..a854a871034 100644
--- a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
@@ -31,6 +31,125 @@ RSpec.shared_examples 'namespace traversal scopes' do
it { expect(subject.where_values_hash).not_to have_key(:type) }
end
+ describe '.order_by_depth' do
+ subject { described_class.where(id: [group_1, nested_group_1, deep_nested_group_1]).order_by_depth(direction) }
+
+ context 'ascending' do
+ let(:direction) { :asc }
+
+ it { is_expected.to eq [deep_nested_group_1, nested_group_1, group_1] }
+ end
+
+ context 'descending' do
+ let(:direction) { :desc }
+
+ it { is_expected.to eq [group_1, nested_group_1, deep_nested_group_1] }
+ end
+ end
+
+ describe '.normal_select' do
+ let(:query_result) { described_class.where(id: group_1).normal_select }
+
+ subject { query_result.column_names }
+
+ it { is_expected.to eq described_class.column_names }
+ end
+
+ shared_examples '.self_and_ancestors' do
+ subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_ancestors }
+
+ it { is_expected.to contain_exactly(group_1, nested_group_1, group_2, nested_group_2) }
+
+ context 'when include_self is false' do
+ subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_ancestors(include_self: false) }
+
+ it { is_expected.to contain_exactly(group_1, group_2) }
+ end
+
+ context 'when hierarchy_order is ascending' do
+ subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_ancestors(hierarchy_order: :asc) }
+
+ it { is_expected.to eq [nested_group_1, nested_group_2, group_1, group_2] }
+ end
+
+ context 'when hierarchy_order is descending' do
+ subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_ancestors(hierarchy_order: :desc) }
+
+ it { is_expected.to eq [group_1, group_2, nested_group_1, nested_group_2] }
+ end
+ end
+
+ describe '.self_and_ancestors' do
+ context "use_traversal_ids_ancestor_scopes feature flag is true" do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+ stub_feature_flags(use_traversal_ids_for_ancestor_scopes: true)
+ end
+
+ it_behaves_like '.self_and_ancestors'
+
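+      # With the traversal-ID flags enabled, ancestors should be resolved without the recursive CTE used by the legacy implementation.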
+      it 'does not make recursive queries' do
+ expect { described_class.where(id: [nested_group_1]).self_and_ancestors.load }.not_to make_queries_matching(/WITH RECURSIVE/)
+ end
+ end
+
+ context "use_traversal_ids_ancestor_scopes feature flag is false" do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestor_scopes: false)
+ end
+
+ it_behaves_like '.self_and_ancestors'
+
+      it 'makes recursive queries' do
+ expect { described_class.where(id: [nested_group_1]).self_and_ancestors.load }.to make_queries_matching(/WITH RECURSIVE/)
+ end
+ end
+ end
+
+ shared_examples '.self_and_ancestor_ids' do
+ subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_ancestor_ids.pluck(:id) }
+
+ it { is_expected.to contain_exactly(group_1.id, nested_group_1.id, group_2.id, nested_group_2.id) }
+
+ context 'when include_self is false' do
+ subject do
+ described_class
+ .where(id: [nested_group_1, nested_group_2])
+ .self_and_ancestor_ids(include_self: false)
+ .pluck(:id)
+ end
+
+ it { is_expected.to contain_exactly(group_1.id, group_2.id) }
+ end
+ end
+
+ describe '.self_and_ancestor_ids' do
+ context "use_traversal_ids_ancestor_scopes feature flag is true" do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+ stub_feature_flags(use_traversal_ids_for_ancestor_scopes: true)
+ end
+
+ it_behaves_like '.self_and_ancestor_ids'
+
+      it 'does not make recursive queries' do
+ expect { described_class.where(id: [nested_group_1]).self_and_ancestor_ids.load }.not_to make_queries_matching(/WITH RECURSIVE/)
+ end
+ end
+
+ context "use_traversal_ids_ancestor_scopes feature flag is false" do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestor_scopes: false)
+ end
+
+ it_behaves_like '.self_and_ancestor_ids'
+
+      it 'makes recursive queries' do
+ expect { described_class.where(id: [nested_group_1]).self_and_ancestor_ids.load }.to make_queries_matching(/WITH RECURSIVE/)
+ end
+ end
+ end
+
describe '.self_and_descendants' do
subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_descendants }