Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot
2025-04-22 21:11:46 +00:00
parent 0c0ac7652b
commit 940dfdfee4
66 changed files with 1306 additions and 96 deletions

View File

@ -104,9 +104,7 @@ export default {
<template>
<gl-popover :target="target" boundary="viewport" placement="top" :show="show">
<gl-skeleton-loader v-if="$apollo.queries.workItem.loading" :height="15">
<rect width="250" height="15" rx="4" />
</gl-skeleton-loader>
<gl-skeleton-loader v-if="$apollo.queries.workItem.loading" :width="150" />
<template v-else>
<div class="gl-flex gl-items-center gl-gap-2">
<status-badge :state="workItem.state" />

View File

@ -115,8 +115,7 @@ export default {
return escape(suggestion);
},
isDuoFirstReviewComment() {
// Must be a Duo bot comment of type DiffNote
if (this.note.author.user_type !== 'duo_code_review_bot' || this.note.type !== 'DiffNote') {
if (this.note.author.user_type !== 'duo_code_review_bot') {
return false;
}
// Get the discussion
@ -124,8 +123,11 @@ export default {
// If can't get discussion or this is not the first note, don't show feedback
return discussion?.notes?.length > 0 && discussion.notes[0].id === this.note.id;
},
isDiffNote() {
return this.note.type === 'DiffNote';
},
defaultAwardsList() {
return this.isDuoFirstReviewComment ? ['thumbsup', 'thumbsdown'] : [];
return this.isDuoFirstReviewComment && this.isDiffNote ? ['thumbsup', 'thumbsdown'] : [];
},
duoFeedbackText() {
return sprintf(
@ -223,7 +225,7 @@ export default {
/>
<div v-else v-safe-html:[$options.safeHtmlConfig]="note.note_html" class="note-text md"></div>
<duo-code-review-feedback
v-if="note.author.user_type === 'duo_code_review_bot' && note.type !== 'DiffNote'"
v-if="isDuoFirstReviewComment && !isDiffNote"
class="gl-mt-3"
data-testid="code-review-feedback"
/>
@ -261,12 +263,12 @@ export default {
class="note_edited_ago"
/>
<div
v-if="isDuoFirstReviewComment"
v-if="isDuoFirstReviewComment && isDiffNote"
v-safe-html:[$options.safeHtmlConfig]="duoFeedbackText"
class="gl-text-md gl-mt-4 gl-text-gray-500"
></div>
<note-awards-list
v-if="isDuoFirstReviewComment || (note.award_emoji && note.award_emoji.length)"
v-if="defaultAwardsList.length || (note.award_emoji && note.award_emoji.length)"
:note-id="note.id"
:note-author-id="note.author.id"
:awards="note.award_emoji"

View File

@ -234,6 +234,7 @@ export default {
<input
:id="inputId"
type="hidden"
:data-testid="inputName"
:name="inputName"
:value="selectedNamespace.id || userNamespaceUniqueId"
/>

View File

@ -1,6 +1,7 @@
<script>
import { GlButton } from '@gitlab/ui';
import { unionBy } from 'lodash';
import fuzzaldrinPlus from 'fuzzaldrin-plus';
import { sortNameAlphabetically, newWorkItemId } from '~/work_items/utils';
import currentUserQuery from '~/graphql_shared/queries/current_user.query.graphql';
import usersSearchQuery from '~/graphql_shared/queries/workspace_autocomplete_users.query.graphql';
@ -163,8 +164,18 @@ export default {
{ options: unselectedUsers, text: __('All users'), textSrOnly: true },
];
}
const filteredParticipants = fuzzaldrinPlus.filter(
// We're storing `name` and `username` as a combined string in
// a new key `matcher` as fuzzaldrin-plus doesn't support searching
// using multiple keys.
this.participants.map((p) => ({ ...p, matcher: `${p.name} ${p.username}` })),
this.searchKey,
{
key: ['matcher'],
},
);
return this.users.map((user) => ({
return unionBy([...filteredParticipants, ...this.users], 'id').map((user) => ({
...user,
value: user?.id,
text: user?.name,

View File

@ -0,0 +1,5 @@
# Resolves the namespace at `fullPath` to its GraphQL global ID.
# Presumably used to obtain the parent id for work-item operations — confirm against callers.
query workItemParent($fullPath: ID!) {
namespace(fullPath: $fullPath) {
id
}
}

View File

@ -27,6 +27,7 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType, withTabs } = {}
const {
canAdminLabel,
canBulkUpdate,
fullPath,
groupPath,
groupId,
@ -36,6 +37,7 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType, withTabs } = {}
labelsManagePath,
registerPath,
signInPath,
hasGroupBulkEditFeature,
hasIterationsFeature,
hasOkrsFeature,
hasSubepicsFeature,
@ -47,7 +49,7 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType, withTabs } = {}
isSignedIn,
workItemType: listWorkItemType,
hasEpicsFeature,
showNewIssueLink,
showNewWorkItem,
canCreateEpic,
autocompleteAwardEmojisPath,
hasScopedLabelsFeature,
@ -115,10 +117,12 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType, withTabs } = {}
router,
apolloProvider,
provide: {
canAdminLabel,
canAdminLabel: parseBoolean(canAdminLabel),
canBulkUpdate: parseBoolean(canBulkUpdate),
fullPath,
isGroup,
isProject: !isGroup,
hasGroupBulkEditFeature: parseBoolean(hasGroupBulkEditFeature),
hasIssueWeightsFeature: parseBoolean(hasIssueWeightsFeature),
hasOkrsFeature: parseBoolean(hasOkrsFeature),
hasSubepicsFeature: parseBoolean(hasSubepicsFeature),
@ -136,7 +140,7 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType, withTabs } = {}
isSignedIn: parseBoolean(isSignedIn),
workItemType: listWorkItemType,
hasEpicsFeature: parseBoolean(hasEpicsFeature),
showNewIssueLink: parseBoolean(showNewIssueLink),
showNewWorkItem: parseBoolean(showNewWorkItem),
canCreateEpic: parseBoolean(canCreateEpic),
autocompleteAwardEmojisPath,
hasQualityManagementFeature: parseBoolean(hasQualityManagementFeature),

View File

@ -1,12 +1,10 @@
<script>
import { GlButton, GlFilteredSearchToken, GlLoadingIcon } from '@gitlab/ui';
import { isEmpty } from 'lodash';
import { createAlert } from '~/alert';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import IssueCardStatistics from 'ee_else_ce/issues/list/components/issue_card_statistics.vue';
import IssueCardTimeInfo from 'ee_else_ce/issues/list/components/issue_card_time_info.vue';
import WorkItemHealthStatus from '~/work_items/components/work_item_health_status.vue';
import CreateWorkItemModal from '~/work_items/components/create_work_item_modal.vue';
import EmptyStateWithoutAnyIssues from '~/issues/list/components/empty_state_without_any_issues.vue';
import {
convertToApiParams,
convertToSearchQuery,
@ -27,6 +25,7 @@ import {
WORKSPACE_PROJECT,
} from '~/issues/constants';
import { AutocompleteCache } from '~/issues/dashboard/utils';
import EmptyStateWithoutAnyIssues from '~/issues/list/components/empty_state_without_any_issues.vue';
import {
CREATED_DESC,
PARAM_FIRST_PAGE_SIZE,
@ -39,8 +38,9 @@ import {
import searchLabelsQuery from '~/issues/list/queries/search_labels.query.graphql';
import setSortPreferenceMutation from '~/issues/list/queries/set_sort_preference.mutation.graphql';
import { fetchPolicies } from '~/lib/graphql';
import { scrollUp } from '~/lib/utils/scroll_utils';
import { isPositiveInteger } from '~/lib/utils/number_utils';
import { scrollUp } from '~/lib/utils/scroll_utils';
import { getParameterByName, removeParams, updateHistory } from '~/lib/utils/url_utility';
import { __, s__ } from '~/locale';
import {
OPERATOR_IS,
@ -78,13 +78,16 @@ import {
TOKEN_TYPE_TYPE,
TOKEN_TYPE_UPDATED,
} from '~/vue_shared/components/filtered_search_bar/constants';
import DateToken from '~/vue_shared/components/filtered_search_bar/tokens/date_token.vue';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import WorkItemDrawer from '~/work_items/components/work_item_drawer.vue';
import { DEFAULT_PAGE_SIZE, issuableListTabs } from '~/vue_shared/issuable/list/constants';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import DateToken from '~/vue_shared/components/filtered_search_bar/tokens/date_token.vue';
import { getParameterByName, removeParams, updateHistory } from '~/lib/utils/url_utility';
import CreateWorkItemModal from '../components/create_work_item_modal.vue';
import WorkItemBulkEditSidebar from '../components/work_item_bulk_edit/work_item_bulk_edit_sidebar.vue';
import WorkItemHealthStatus from '../components/work_item_health_status.vue';
import WorkItemDrawer from '../components/work_item_drawer.vue';
import {
BASE_ALLOWED_CREATE_TYPES,
DETAIL_VIEW_QUERY_PARAM_NAME,
NAME_TO_ENUM_MAP,
STATE_CLOSED,
@ -92,9 +95,13 @@ import {
WORK_ITEM_TYPE_ENUM_EPIC,
WORK_ITEM_TYPE_ENUM_ISSUE,
WORK_ITEM_TYPE_NAME_EPIC,
WORK_ITEM_TYPE_NAME_KEY_RESULT,
WORK_ITEM_TYPE_NAME_OBJECTIVE,
} from '../constants';
import getWorkItemsQuery from '../graphql/list/get_work_items.query.graphql';
import getWorkItemStateCountsQuery from '../graphql/list/get_work_item_state_counts.query.graphql';
import getWorkItemsQuery from '../graphql/list/get_work_items.query.graphql';
import workItemBulkUpdateMutation from '../graphql/list/work_item_bulk_update.mutation.graphql';
import workItemParent from '../graphql/list/work_item_parent.query.graphql';
import { sortOptions, urlSortParams } from './list/constants';
const EmojiToken = () =>
@ -122,6 +129,7 @@ export default {
IssuableList,
IssueCardStatistics,
IssueCardTimeInfo,
WorkItemBulkEditSidebar,
WorkItemDrawer,
WorkItemHealthStatus,
EmptyStateWithoutAnyIssues,
@ -131,15 +139,19 @@ export default {
mixins: [glFeatureFlagMixin()],
inject: [
'autocompleteAwardEmojisPath',
'canBulkUpdate',
'canBulkEditEpics',
'fullPath',
'hasEpicsFeature',
'hasGroupBulkEditFeature',
'hasIssueDateFilterFeature',
'hasOkrsFeature',
'hasQualityManagementFeature',
'initialSort',
'isGroup',
'isSignedIn',
'showNewWorkItem',
'workItemType',
'hasIssueDateFilterFeature',
],
props: {
eeWorkItemUpdateCount: {
@ -147,11 +159,6 @@ export default {
required: false,
default: 0,
},
showBulkEditSidebar: {
type: Boolean,
required: false,
default: false,
},
withTabs: {
type: Boolean,
required: false,
@ -171,12 +178,15 @@ export default {
data() {
return {
error: undefined,
bulkEditInProgress: false,
filterTokens: [],
hasAnyIssues: false,
isInitialLoadComplete: false,
pageInfo: {},
pageParams: {},
pageSize: DEFAULT_PAGE_SIZE,
parentId: undefined,
showBulkEditSidebar: false,
sortKey: CREATED_DESC,
state: STATUS_OPEN,
workItems: [],
@ -244,11 +254,49 @@ export default {
Sentry.captureException(error);
},
},
parentId: {
query: workItemParent,
variables() {
return {
fullPath: this.fullPath,
};
},
update(data) {
return data.namespace.id;
},
skip() {
return !this.showBulkEditSidebar;
},
},
},
computed: {
isItemSelected() {
return !isEmpty(this.activeItem);
},
allowBulkEditing() {
if (this.isEpicsList) {
return this.canBulkEditEpics;
}
if (this.isGroup) {
return this.canBulkUpdate && this.hasGroupBulkEditFeature;
}
return this.canBulkUpdate;
},
// TODO: delete once https://gitlab.com/gitlab-org/gitlab/-/merge_requests/185081 is merged
allowedWorkItemTypes() {
if (this.isGroup) {
return [];
}
if (this.glFeatures.okrsMvc && this.hasOkrsFeature) {
return BASE_ALLOWED_CREATE_TYPES.concat(
WORK_ITEM_TYPE_NAME_KEY_RESULT,
WORK_ITEM_TYPE_NAME_OBJECTIVE,
);
}
return BASE_ALLOWED_CREATE_TYPES;
},
apiFilterParams() {
return convertToApiParams(this.filterTokens);
},
@ -700,6 +748,36 @@ export default {
this.$router.push({ query: this.urlParams });
},
async handleWorkItemBulkEdit({ ids, addLabelIds, removeLabelIds }) {
this.bulkEditInProgress = true;
try {
await this.$apollo.mutate({
mutation: workItemBulkUpdateMutation,
variables: {
input: {
parentId: this.parentId,
ids,
labelsWidget: {
addLabelIds,
removeLabelIds,
},
},
},
});
await this.refetchItems();
} catch (error) {
createAlert({
message: s__('WorkItem|Something went wrong while bulk editing.'),
captureError: true,
error,
});
} finally {
this.bulkEditInProgress = false;
this.showBulkEditSidebar = false;
}
},
saveSortPreference(sortKey) {
this.$apollo
.mutate({
@ -870,13 +948,31 @@ export default {
@select-issuable="handleToggle"
>
<template #nav-actions>
<gl-button
v-if="enableClientSideBoardsExperiment"
data-testid="show-local-board-button"
@click="showLocalBoard = true"
>{{ __('Launch board') }}</gl-button
>
<slot name="nav-actions"> </slot>
<div class="gl-flex gl-gap-3">
<gl-button
v-if="enableClientSideBoardsExperiment"
data-testid="show-local-board-button"
@click="showLocalBoard = true"
>
{{ __('Launch board') }}
</gl-button>
<gl-button
v-if="allowBulkEditing"
:disabled="showBulkEditSidebar"
data-testid="bulk-edit-start-button"
@click="showBulkEditSidebar = true"
>
{{ __('Bulk edit') }}
</gl-button>
<create-work-item-modal
v-if="showNewWorkItem"
:allowed-work-item-types="allowedWorkItemTypes"
:always-show-work-item-type-select="!isEpicsList"
:is-group="isGroup"
:preselected-work-item-type="preselectedWorkItemType"
@workItemCreated="refetchItems"
/>
</div>
</template>
<template #timeframe="{ issuable = {} }">
@ -900,11 +996,27 @@ export default {
</template>
<template #bulk-edit-actions="{ checkedIssuables }">
<slot name="bulk-edit-actions" :checked-issuables="checkedIssuables"></slot>
<gl-button
:disabled="!checkedIssuables.length || bulkEditInProgress"
form="work-item-list-bulk-edit"
:loading="bulkEditInProgress"
type="submit"
variant="confirm"
>
{{ __('Update selected') }}
</gl-button>
<gl-button class="gl-float-right" @click="showBulkEditSidebar = false">
{{ __('Cancel') }}
</gl-button>
</template>
<template #sidebar-items="{ checkedIssuables }">
<slot name="sidebar-items" :checked-issuables="checkedIssuables"></slot>
<work-item-bulk-edit-sidebar
:checked-items="checkedIssuables"
:full-path="fullPath"
:is-group="isGroup"
@bulk-update="handleWorkItemBulkEdit"
/>
</template>
<template #health-status="{ issuable = {} }">

View File

@ -9,6 +9,7 @@ module WorkItemsHelper
{
autocomplete_award_emojis_path: autocomplete_award_emojis_path,
can_admin_label: can?(current_user, :admin_label, resource_parent).to_s,
can_bulk_update: can?(current_user, :admin_issue, resource_parent).to_s,
full_path: resource_parent.full_path,
group_path: group&.full_path,
issues_list_path:
@ -22,7 +23,7 @@ module WorkItemsHelper
default_branch: resource_parent.is_a?(Project) ? resource_parent.default_branch_or_main : nil,
initial_sort: current_user&.user_preference&.issues_sort,
is_signed_in: current_user.present?.to_s,
show_new_issue_link: can?(current_user, :create_work_item, group).to_s,
show_new_work_item: can?(current_user, :create_work_item, group).to_s,
can_create_projects: can?(current_user, :create_projects, group).to_s,
new_project_path: new_project_path(namespace_id: group&.id),
group_id: group&.id,

View File

@ -5,4 +5,10 @@ class ProtectedBranch::PushAccessLevel < ApplicationRecord
include ProtectedRefDeployKeyAccess
# default value for the access_level column
GITLAB_DEFAULT_ACCESS_LEVEL = Gitlab::Access::MAINTAINER
ignore_column :id_convert_to_bigint, remove_with: '18.3', remove_after: '2025-07-06'
ignore_column :protected_branch_id_convert_to_bigint, remove_with: '18.3', remove_after: '2025-07-06'
ignore_column :user_id_convert_to_bigint, remove_with: '18.3', remove_after: '2025-07-06'
ignore_column :group_id_convert_to_bigint, remove_with: '18.3', remove_after: '2025-07-06'
ignore_column :deploy_key_id_convert_to_bigint, remove_with: '18.3', remove_after: '2025-07-06'
end

View File

@ -74,14 +74,6 @@
-# haml-lint:disable InlineJavaScript
%script#js-confidential-issue-data{ type: "application/json" }= { is_confidential: issuable_sidebar[:confidential], is_editable: can_edit_issuable }.to_json.html_safe
-# REMOVE BLOCK - cf. https://gitlab.com/gitlab-org/gitlab/-/merge_requests/172577
.block
.hide-collapsed.gl-flex.gl-items-center.gl-font-bold.gl-leading-20.gl-text-default
= _('Confidentiality')
.hide-collapsed.gl-text-subtle
= _("Confidentiality controls have moved to the issue actions menu (%{icon}) at the top of the page.").html_safe % { icon: sprite_icon('ellipsis_v', size: 12, css_class: 'gl-align-middle') }
-# END REMOVE BLOCK
= render_if_exists 'shared/issuable/sidebar_cve_id_request', issuable_sidebar: issuable_sidebar
.js-sidebar-participants-widget-root

View File

@ -0,0 +1,8 @@
name: load_balancer_replace_hosts
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/497226
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/170129
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/534523
milestone: '18.0'
group: group::database frameworks
type: gitlab_com_derisk
default_enabled: false

View File

@ -0,0 +1,10 @@
---
migration_job_name: BackfillProtectedBranchPushAccessLevelsFields
description: Backfills the sharding keys `protected_branch_push_access_levels.protected_branch_project_id`
and `protected_branch_push_access_levels.protected_branch_namespace_id` from `protected_branches`, and
converts int columns to bigint.
feature_category: source_code_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175714
milestone: '18.0'
queued_migration_version: 20250414090268
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,8 @@
---
migration_job_name: FixStringConfigHashesGroupStreamingDestinations
description: Fixes any string config hashes to jsonb
feature_category: audit_events
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/187558
milestone: '18.0'
queued_migration_version: 20250409141444
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,8 @@
---
migration_job_name: FixStringConfigHashesInstanceStreamingDestinations
description: Fixes any string config hashes to jsonb
feature_category: audit_events
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/187558
milestone: '18.0'
queued_migration_version: 20250409141503
finalized_by: # version of the migration that finalized this BBM

View File

@ -26,3 +26,5 @@ desired_sharding_key:
sharding_key: namespace_id
belongs_to: protected_branch
table_size: small
desired_sharding_key_migration_job_name:
- BackfillProtectedBranchPushAccessLevelsFields

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds a nullable bigint `protected_branch_project_id` column to
# `protected_branch_push_access_levels` (a denormalized copy of the parent
# protected branch's project id, to be backfilled separately).
class AddProtectedBranchProjectIdToProtectedBranchPushAccessLevels < Gitlab::Database::Migration[2.2]
milestone '18.0'
# Reversible: `change` lets the framework derive the inverse (remove_column).
def change
add_column :protected_branch_push_access_levels, :protected_branch_project_id, :bigint
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds a nullable bigint `protected_branch_namespace_id` column to
# `protected_branch_push_access_levels` (a denormalized copy of the parent
# protected branch's namespace id, to be backfilled separately).
class AddProtectedBranchNamespaceIdToProtectedBranchPushAccessLevels < Gitlab::Database::Migration[2.2]
milestone '18.0'
# Reversible: `change` lets the framework derive the inverse (remove_column).
def change
add_column :protected_branch_push_access_levels, :protected_branch_namespace_id, :bigint
end
end

View File

@ -3,12 +3,10 @@
class DropNotNullConstraintFromMergeRequestDiffCommits < Gitlab::Database::Migration[2.2]
milestone '18.0'
# rubocop:disable Migration/ChangeColumnNullOnHighTrafficTable -- We're making them nullable and there is no constraint
def up
change_column_null :merge_request_diff_commits, :sha, true
change_column_null :merge_request_diff_commits, :trailers, true
end
# rubocop:enable Migration/ChangeColumnNullOnHighTrafficTable
def down
# no-op

View File

@ -1,6 +1,5 @@
# frozen_string_literal: true
# rubocop:disable Migration/ChangeColumnNullOnHighTrafficTable -- The cop is introduced after this migration.
class RemoveNotNullConstraintFromTraversalIdsIfExists < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '16.11'
@ -27,4 +26,3 @@ class RemoveNotNullConstraintFromTraversalIdsIfExists < Gitlab::Database::Migrat
# no-op
end
end
# rubocop:enable Migration/ChangeColumnNullOnHighTrafficTable

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Enqueues the FixStringConfigHashesGroupStreamingDestinations batched
# background migration over `audit_events_group_external_streaming_destinations`.
class QueueFixStringConfigHashesGroupStreamingDestinations < Gitlab::Database::Migration[2.2]
milestone '18.0'
restrict_gitlab_migration gitlab_schema: :gitlab_main
MIGRATION = "FixStringConfigHashesGroupStreamingDestinations"
BATCH_SIZE = 100
SUB_BATCH_SIZE = 10
# Queues the BBM, batching rows of the target table by :id.
def up
queue_batched_background_migration(
MIGRATION,
:audit_events_group_external_streaming_destinations,
:id,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
# Removes the queued BBM so the migration can be rolled back cleanly.
def down
delete_batched_background_migration(MIGRATION, :audit_events_group_external_streaming_destinations, :id, [])
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Enqueues the FixStringConfigHashesInstanceStreamingDestinations batched
# background migration over `audit_events_instance_external_streaming_destinations`.
class QueueFixStringConfigHashesInstanceStreamingDestinations < Gitlab::Database::Migration[2.2]
milestone '18.0'
restrict_gitlab_migration gitlab_schema: :gitlab_main
MIGRATION = "FixStringConfigHashesInstanceStreamingDestinations"
BATCH_SIZE = 100
SUB_BATCH_SIZE = 10
# Queues the BBM, batching rows of the target table by :id.
def up
queue_batched_background_migration(
MIGRATION,
:audit_events_instance_external_streaming_destinations,
:id,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
# Removes the queued BBM so the migration can be rolled back cleanly.
def down
delete_batched_background_migration(MIGRATION, :audit_events_instance_external_streaming_destinations, :id, [])
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# Adds an index on `protected_branch_push_access_levels.protected_branch_project_id`.
# Name is truncated to fit the 63-char PostgreSQL identifier limit.
class IndexProtectedBranchPushAccessLevelsOnProtectedBranchProjectId < Gitlab::Database::Migration[2.2]
milestone '18.0'
# Required: add_concurrent_index cannot run inside a transaction.
disable_ddl_transaction!
INDEX_NAME = 'index_protected_branch_push_access_levels_on_protected_branch_p'
def up
add_concurrent_index :protected_branch_push_access_levels, :protected_branch_project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :protected_branch_push_access_levels, INDEX_NAME
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# `protected_branch_push_access_levels.protected_branch_project_id` to `projects`.
class AddProtectedBranchPushAccessLevelsProtectedBranchProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '18.0'
# Required: add_concurrent_foreign_key cannot run inside a transaction.
disable_ddl_transaction!
def up
add_concurrent_foreign_key :protected_branch_push_access_levels, :projects, column: :protected_branch_project_id,
on_delete: :cascade
end
def down
# Dropping an FK takes a heavy lock; retry under lock timeouts.
with_lock_retries do
remove_foreign_key :protected_branch_push_access_levels, column: :protected_branch_project_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Installs a DB trigger that copies `protected_branches.project_id` into
# `protected_branch_push_access_levels.protected_branch_project_id` on
# insert/update, keeping the denormalized sharding key populated for new rows.
class AddProtectedBranchPushAccessLevelsProtectedBranchProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '18.0'
def up
install_sharding_key_assignment_trigger(
table: :protected_branch_push_access_levels,
sharding_key: :protected_branch_project_id,
parent_table: :protected_branches,
parent_sharding_key: :project_id,
foreign_key: :protected_branch_id
)
end
def down
# Mirror of `up` with identical arguments so the trigger is fully removed.
remove_sharding_key_assignment_trigger(
table: :protected_branch_push_access_levels,
sharding_key: :protected_branch_project_id,
parent_table: :protected_branches,
parent_sharding_key: :project_id,
foreign_key: :protected_branch_id
)
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# Adds an index on `protected_branch_push_access_levels.protected_branch_namespace_id`.
# Name is truncated to fit the 63-char PostgreSQL identifier limit.
class IndexProtectedBranchPushAccessLevelsOnProtectedBranchNamespaceId < Gitlab::Database::Migration[2.2]
milestone '18.0'
# Required: add_concurrent_index cannot run inside a transaction.
disable_ddl_transaction!
INDEX_NAME = 'index_protected_branch_push_access_levels_on_protected_branch_n'
def up
add_concurrent_index :protected_branch_push_access_levels, :protected_branch_namespace_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :protected_branch_push_access_levels, INDEX_NAME
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# `protected_branch_push_access_levels.protected_branch_namespace_id` to `namespaces`.
class AddProtectedBranchPushAccessLevelsProtectedBranchNamespaceIdFk < Gitlab::Database::Migration[2.2]
milestone '18.0'
# Required: add_concurrent_foreign_key cannot run inside a transaction.
disable_ddl_transaction!
def up
add_concurrent_foreign_key :protected_branch_push_access_levels, :namespaces,
column: :protected_branch_namespace_id, on_delete: :cascade
end
def down
# Dropping an FK takes a heavy lock; retry under lock timeouts.
with_lock_retries do
remove_foreign_key :protected_branch_push_access_levels, column: :protected_branch_namespace_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Installs a DB trigger that copies `protected_branches.namespace_id` into
# `protected_branch_push_access_levels.protected_branch_namespace_id` on
# insert/update, keeping the denormalized sharding key populated for new rows.
class AddProtectedBranchPushAccessLevelsProtectedBranchNamespaceIdTrigger < Gitlab::Database::Migration[2.2]
milestone '18.0'
def up
install_sharding_key_assignment_trigger(
table: :protected_branch_push_access_levels,
sharding_key: :protected_branch_namespace_id,
parent_table: :protected_branches,
parent_sharding_key: :namespace_id,
foreign_key: :protected_branch_id
)
end
def down
# Mirror of `up` with identical arguments so the trigger is fully removed.
remove_sharding_key_assignment_trigger(
table: :protected_branch_push_access_levels,
sharding_key: :protected_branch_namespace_id,
parent_table: :protected_branches,
parent_sharding_key: :namespace_id,
foreign_key: :protected_branch_id
)
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Prepares the int -> bigint conversion of the listed columns on
# `protected_branch_push_access_levels` (creates the `*_convert_to_bigint`
# shadow columns and sync machinery; actual values are backfilled elsewhere).
class IntConversionForProtectedBranchPushAccessLevelsFields < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '18.0'
TABLE = :protected_branch_push_access_levels
# Integer columns being widened to bigint.
COLUMNS = %i[id protected_branch_id user_id group_id deploy_key_id]
def up
initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
end
def down
revert_initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
# Enqueues the BackfillProtectedBranchPushAccessLevelsFields batched background
# migration, which fills the new sharding-key columns and bigint shadow columns
# on `protected_branch_push_access_levels`.
class QueueBackfillProtectedBranchPushAccessLevelsFields < Gitlab::Database::Migration[2.2]
milestone '18.0'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillProtectedBranchPushAccessLevelsFields"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
# Queues the BBM, batching rows of the target table by :id.
def up
queue_batched_background_migration(
MIGRATION,
:protected_branch_push_access_levels,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
# Removes the queued BBM so the migration can be rolled back cleanly.
def down
delete_batched_background_migration(
MIGRATION,
:protected_branch_push_access_levels,
:id,
[]
)
end
end

View File

@ -0,0 +1 @@
fc74ee7659582047ff80b7ae0f82a6cca6c44178bdf25f6785cb296e2368f10a

View File

@ -0,0 +1 @@
4331890af7c3f3160228795ff472073812024bce537af87453d53717f82b8c0e

View File

@ -0,0 +1 @@
1d983d3b65c64b6fd515e27dcb03aecd240770c111379fe3fa17d0879de31965

View File

@ -0,0 +1 @@
f2120497fb547da5c515da8355ed40af915f9afc0fb77bd2cc384185c602cabe

View File

@ -0,0 +1 @@
f638bd9948347d38f8439d3755a2e89d297de07debe6573bfebff0c4a7dbe475

View File

@ -0,0 +1 @@
1c74e2ec07e4424f5b736ef9f764b2e0941b56bb84ab10bebd0c1b7171f67984

View File

@ -0,0 +1 @@
5b9b6439940b575b7e3ff88411d15469d5645f75082a5c5ee682c62a05d4ecc5

View File

@ -0,0 +1 @@
ab0deb7ce441a02283208bf5253498fe33bf6ecf6a34623ed6beb3a27c059b33

View File

@ -0,0 +1 @@
ccdba2146deda114c1ab90b0a0e2f26383497a823fd63ea66abbb0426ec6fa38

View File

@ -0,0 +1 @@
67a7a5942983782bbe1b3633a1e2999b09b81b8733b0a1d29023724e579c2473

View File

@ -0,0 +1 @@
e77791a135a195582aca639397fb2e198109ce9a157bd43d8368ff2738df23c2

View File

@ -0,0 +1 @@
b1692892e1bc9684cd824b0190d7090fab917a5ef14e749a7e865a78d69441fe

View File

@ -1168,6 +1168,22 @@ BEGIN
END;
$$;
CREATE FUNCTION trigger_009314eae986() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."protected_branch_project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."protected_branch_project_id"
FROM "protected_branches"
WHERE "protected_branches"."id" = NEW."protected_branch_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_01b3fc052119() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -2690,6 +2706,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_744ab45ee5ac() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."protected_branch_namespace_id" IS NULL THEN
SELECT "namespace_id"
INTO NEW."protected_branch_namespace_id"
FROM "protected_branches"
WHERE "protected_branches"."id" = NEW."protected_branch_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_7495f5e0efcb() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -21793,7 +21825,9 @@ CREATE TABLE protected_branch_push_access_levels (
updated_at timestamp without time zone NOT NULL,
user_id bigint,
group_id bigint,
deploy_key_id bigint
deploy_key_id bigint,
protected_branch_project_id bigint,
protected_branch_namespace_id bigint
);
CREATE SEQUENCE protected_branch_push_access_levels_id_seq
@ -37130,6 +37164,10 @@ CREATE INDEX index_protected_branch_push_access ON protected_branch_push_access_
CREATE INDEX index_protected_branch_push_access_levels_on_group_id ON protected_branch_push_access_levels USING btree (group_id);
CREATE INDEX index_protected_branch_push_access_levels_on_protected_branch_n ON protected_branch_push_access_levels USING btree (protected_branch_namespace_id);
CREATE INDEX index_protected_branch_push_access_levels_on_protected_branch_p ON protected_branch_push_access_levels USING btree (protected_branch_project_id);
CREATE INDEX index_protected_branch_push_access_levels_on_user_id ON protected_branch_push_access_levels USING btree (user_id);
CREATE INDEX index_protected_branch_unprotect_access ON protected_branch_unprotect_access_levels USING btree (protected_branch_id);
@ -41468,6 +41506,8 @@ CREATE TRIGGER tags_loose_fk_trigger AFTER DELETE ON tags REFERENCING OLD TABLE
CREATE TRIGGER terraform_state_versions_loose_fk_trigger AFTER DELETE ON terraform_state_versions REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER trigger_009314eae986 BEFORE INSERT OR UPDATE ON protected_branch_push_access_levels FOR EACH ROW EXECUTE FUNCTION trigger_009314eae986();
CREATE TRIGGER trigger_01b3fc052119 BEFORE INSERT OR UPDATE ON approval_merge_request_rules FOR EACH ROW EXECUTE FUNCTION trigger_01b3fc052119();
CREATE TRIGGER trigger_02450faab875 BEFORE INSERT OR UPDATE ON vulnerability_occurrence_identifiers FOR EACH ROW EXECUTE FUNCTION trigger_02450faab875();
@ -41658,6 +41698,8 @@ CREATE TRIGGER trigger_738125833856 BEFORE INSERT OR UPDATE ON bulk_import_confi
CREATE TRIGGER trigger_740afa9807b8 BEFORE INSERT OR UPDATE ON subscription_user_add_on_assignments FOR EACH ROW EXECUTE FUNCTION trigger_740afa9807b8();
CREATE TRIGGER trigger_744ab45ee5ac BEFORE INSERT OR UPDATE ON protected_branch_push_access_levels FOR EACH ROW EXECUTE FUNCTION trigger_744ab45ee5ac();
CREATE TRIGGER trigger_7495f5e0efcb BEFORE INSERT OR UPDATE ON snippet_user_mentions FOR EACH ROW EXECUTE FUNCTION trigger_7495f5e0efcb();
CREATE TRIGGER trigger_765cae42cd77 BEFORE INSERT OR UPDATE ON bulk_import_trackers FOR EACH ROW EXECUTE FUNCTION trigger_765cae42cd77();
@ -42877,6 +42919,9 @@ ALTER TABLE ONLY deploy_tokens
ALTER TABLE ONLY oauth_openid_requests
ADD CONSTRAINT fk_7092424b77 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY protected_branch_push_access_levels
ADD CONSTRAINT fk_70dc11e706 FOREIGN KEY (protected_branch_namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY bulk_import_failures
ADD CONSTRAINT fk_70f30b02fd FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -43861,6 +43906,9 @@ ALTER TABLE ONLY ci_sources_pipelines
ALTER TABLE p_ci_builds_metadata
ADD CONSTRAINT fk_e20479742e_p FOREIGN KEY (partition_id, build_id) REFERENCES p_ci_builds(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY protected_branch_push_access_levels
ADD CONSTRAINT fk_e23067f9e1 FOREIGN KEY (protected_branch_project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY gitlab_subscriptions
ADD CONSTRAINT fk_e2595d00a1 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;

View File

@ -74,9 +74,9 @@ Some group items are excluded from migration because they:
- Deploy tokens
- Webhooks
- Are not supported:
- Push rules
- Iteration cadence settings
- Pending member invitations
- Push rules
## Migrated project items
@ -220,8 +220,8 @@ Setting-related project items that are migrated to the destination GitLab instan
Some project items are excluded from migration because they:
- Might contain sensitive information:
- CI/CD variables
- CI/CD job logs
- CI/CD variables
- Container registry images
- Deploy keys
- Deploy tokens
@ -232,6 +232,11 @@ Some project items are excluded from migration because they:
- Webhooks
- Are not supported:
- Agents
- Container registry
- Environments
- Feature flags
- Infrastructure registry
- Linked issues
- Merge request approval rules
{{< alert type="note" >}}
@ -240,11 +245,8 @@ Some project items are excluded from migration because they:
{{< /alert >}}
- Container registry
- Environments
- Feature flags
- Infrastructure registry
- Merge request dependencies
- Package registry
- Pages domains
- Remote mirrors
- Pending member invitations
- Remote mirrors

View File

@ -4,10 +4,12 @@ module Authn
module Tokens
class PersonalAccessToken
# Returns true when +plaintext+ begins with any configured PAT prefix:
# either the active instance prefix or the application-setting default.
# Nil, duplicate, and empty prefixes are dropped before matching, so an
# unconfigured prefix can never cause a false positive (an empty string
# would match every input).
def self.prefix?(plaintext)
  candidates = [
    ::PersonalAccessToken.token_prefix,
    ApplicationSetting.defaults[:personal_access_token_prefix]
  ]
  usable_prefixes = candidates.compact.uniq.reject(&:empty?)

  # start_with? accepts multiple prefixes and returns false when given none.
  plaintext.start_with?(*usable_prefixes)
end
attr_reader :revocable, :source

View File

@ -0,0 +1,54 @@
# frozen_string_literal: true
module Gitlab
  module BackgroundMigration
    # Backfills the sharding-key columns (protected_branch_namespace_id and
    # protected_branch_project_id) on protected_branch_push_access_levels by
    # copying them from the owning protected_branches row, and — when the
    # table currently carries *_convert_to_bigint mirror columns — backfills
    # those in the same UPDATE.
    class BackfillProtectedBranchPushAccessLevelsFields < BatchedMigrationJob
      operation_name :backfill_protected_branch_push_access_levels_fields
      feature_category :source_code_management

      # Bigint-conversion columns that MAY exist on the table. Only the ones
      # actually present (see #all_column_names) are included in the UPDATE,
      # so this job works both before and after the int4 -> int8 column swap.
      COLUMNS_CONVERT_TO_BIGINT = %w[
        id_convert_to_bigint
        protected_branch_id_convert_to_bigint
        user_id_convert_to_bigint
        group_id_convert_to_bigint
        deploy_key_id_convert_to_bigint
      ].freeze

      delegate :quote_column_name, :quote_table_name, to: :connection

      # Executes one UPDATE per sub-batch, joining back to protected_branches
      # to copy namespace_id/project_id (plus any bigint mirror columns).
      # The MATERIALIZED CTE pins the sub-batch rows before the join.
      def perform
        each_sub_batch do |sub_batch|
          connection.execute(
            <<~SQL
              WITH filtered_relation AS MATERIALIZED (#{sub_batch.limit(100).to_sql})
              UPDATE protected_branch_push_access_levels
              SET protected_branch_namespace_id = protected_branches.namespace_id,
                  protected_branch_project_id = protected_branches.project_id
                  #{bigint_column_assignments}
              FROM filtered_relation INNER JOIN protected_branches
                ON protected_branches.id = filtered_relation.protected_branch_id
              WHERE protected_branch_push_access_levels.id = filtered_relation.id
            SQL
          )
        end
      end

      private

      # Builds the extra SET assignments for whichever *_convert_to_bigint
      # columns exist right now. Returns '' when none are present; each
      # assignment is prefixed with ",\n" so it can be appended directly
      # after the fixed assignments in the UPDATE above. Memoized because
      # the column set cannot change mid-job.
      def bigint_column_assignments
        @bigint_assignments ||=
          COLUMNS_CONVERT_TO_BIGINT.filter_map do |bigint_column|
            next unless all_column_names.include?(bigint_column)

            source_column = bigint_column.sub('_convert_to_bigint', '')

            ",\n#{quote_column_name(bigint_column)} = filtered_relation.#{quote_column_name(source_column)}"
          end.join('')
      end

      # Memoized list of the table's current column names (Strings).
      def all_column_names
        @all_column_names ||= connection.columns(:protected_branch_push_access_levels).map(&:name)
      end
    end
  end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Gitlab
  module BackgroundMigration
    # CE no-op stub for an EE-only batched background migration. The actual
    # implementation is layered on via `prepend_mod` below, from:
    # ee/lib/ee/gitlab/background_migration/fix_string_config_hashes_group_streaming_destinations.rb
    class FixStringConfigHashesGroupStreamingDestinations < BatchedMigrationJob
      feature_category :audit_events

      def perform
        # Intentionally empty in CE; the EE module overrides this method.
      end
    end
  end
end

Gitlab::BackgroundMigration::FixStringConfigHashesGroupStreamingDestinations.prepend_mod

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Gitlab
  module BackgroundMigration
    # CE no-op stub for an EE-only batched background migration. The actual
    # implementation is layered on via `prepend_mod` below, from:
    # ee/lib/ee/gitlab/background_migration/fix_string_config_hashes_instance_streaming_destinations.rb
    class FixStringConfigHashesInstanceStreamingDestinations < BatchedMigrationJob
      feature_category :audit_events

      def perform
        # Intentionally empty in CE; the EE module overrides this method.
      end
    end
  end
end

Gitlab::BackgroundMigration::FixStringConfigHashesInstanceStreamingDestinations.prepend_mod

View File

@ -31,7 +31,7 @@ module Gitlab
'SRV' => Net::DNS::SRV
}.freeze
Address = Struct.new(:hostname, :port) do
Address = Data.define(:hostname, :port) do
def to_s
port ? "#{hostname}:#{port}" : hostname
end
@ -136,11 +136,19 @@ module Gitlab
wait_time
end
def replace_hosts(addresses)
if replace_hosts_enabled?
new_replace_hosts(addresses)
else
old_replace_hosts(addresses)
end
end
# Replaces all the hosts in the load balancer with the new ones,
# disconnecting the old connections.
#
# addresses - An Array of Address structs to use for the new hosts.
def replace_hosts(addresses)
# addresses - An Array of Address Data types to use for the new hosts
def old_replace_hosts(addresses)
old_hosts = load_balancer.host_list.hosts
load_balancer.host_list.hosts = addresses.map do |addr|
@ -155,6 +163,56 @@ module Gitlab
disconnect_old_hosts(old_hosts)
end
# Replace the hosts in the load balancer with the new ones from the addresses provided.
# Reuse existing hosts where the hostname and port remain unchanged.
# Disconnect the old connections.
#
# addresses - An Array of Address Data types to use for the new hosts
def new_replace_hosts(addresses)
old_hosts = load_balancer.host_list.hosts
# Example:
# old_hosts_lookup = {
# Address.new("10.0.1.30", 5432) => [host1, host2],
# Address.new("10.0.1.31", 5432) => [host3]
# }
old_hosts_lookup = old_hosts.each_with_object({}) do |host, hash|
key = Address.new(host.host.to_s, host.port)
(hash[key] ||= []) << host
end
# Find addresses that exist in both the addresses
# and old_hosts_lookup collections
hosts_to_keep = addresses & old_hosts_lookup.keys
# Create new hosts with current kept hosts
new_hosts = addresses.map do |addr|
if hosts_to_keep.include?(addr)
old_hosts_lookup[addr]
else
Host.new(addr.hostname, load_balancer, port: addr.port)
end
end
# Update load balancer's host list
load_balancer.host_list.hosts = new_hosts
# return the elements that are in the old_host_lookup, but not in addresses.
addresses_to_disconnect = old_hosts_lookup.keys - addresses
# Convert addresses to disconnect back to Host objects before disconnecting them
hosts_to_disconnect = addresses_to_disconnect.flat_map do |addr|
old_hosts_lookup[addr] || []
end
# We must explicitly disconnect the old connections, otherwise we may
# leak database connections over time. For example, if a request
# started just before we added the new hosts it will use an old
# host/connection. While this connection will be checked in and out,
# it won't be explicitly disconnected.
disconnect_old_hosts(hosts_to_disconnect)
end
# Returns an Array containing:
#
# 1. The time to wait for the next check.
@ -224,6 +282,10 @@ module Gitlab
private
def replace_hosts_enabled?
Feature.enabled?(:load_balancer_replace_hosts, Feature.current_pod)
end
def record_type_for(type)
RECORD_TYPES.fetch(type) do
raise(ArgumentError, "Unsupported record type: #{type}")
@ -242,7 +304,7 @@ module Gitlab
end
def addresses_from_a_record(resources)
resources.map { |r| Address.new(r.address.to_s) }
resources.map { |r| Address.new(r.address.to_s, nil) }
end
def sampler

View File

@ -16569,9 +16569,6 @@ msgstr ""
msgid "Confidentiality"
msgstr ""
msgid "Confidentiality controls have moved to the issue actions menu (%{icon}) at the top of the page."
msgstr ""
msgid "Configuration help"
msgstr ""
@ -68935,6 +68932,9 @@ msgstr ""
msgid "WorkItem|Something went wrong when trying to link a item. Please try again."
msgstr ""
msgid "WorkItem|Something went wrong while bulk editing."
msgstr ""
msgid "WorkItem|Something went wrong while copying the %{workItemType} email address. Please try again."
msgstr ""

View File

@ -26,6 +26,16 @@ module RuboCop
return unless table_name
nullable = node.arguments.third
# constraint validation only needs to be performed when _adding_ a constraint,
# not when removing a constraint.
#
# rubocop:disable Lint/BooleanSymbol -- Node#type? takes the name of the primitive as a symbol
return unless nullable&.type?(:false)
# rubocop:enable Lint/BooleanSymbol
high_traffic_tables.include?(table_name.to_sym)
end
end

View File

@ -154,7 +154,6 @@ spec/frontend/pipeline_wizard/components/step_spec.js
spec/frontend/profile/preferences/components/profile_preferences_spec.js
spec/frontend/projects/commit/components/form_modal_spec.js
spec/frontend/projects/commits/components/author_select_spec.js
spec/frontend/projects/new/components/new_project_url_select_spec.js
spec/frontend/projects/report_abuse/components/report_abuse_dropdown_item_spec.js
spec/frontend/projects/settings/components/branch_rule_modal_spec.js
spec/frontend/projects/settings/topics/components/topics_token_selector_spec.js

View File

@ -159,6 +159,19 @@ describe('issue_note_body component', () => {
});
describe('duo code review feedback', () => {
const createMockStoreWithDiscussion = (discussionId, discussionNotes) => {
return new Vuex.Store({
getters: {
getDiscussion: () => (id) => {
if (id === discussionId) {
return { notes: discussionNotes };
}
return {};
},
},
});
};
it.each`
userType | type | exists | existsText
${'duo_code_review_bot'} | ${null} | ${true} | ${'renders'}
@ -168,13 +181,47 @@ describe('issue_note_body component', () => {
`(
'$existsText code review feedback component when author type is "$userType" and note type is "$type"',
({ userType, type, exists }) => {
const duoNote = {
...note,
id: '1',
type,
discussion_id: 'discussion1',
author: {
...note.author,
user_type: userType,
},
};
const mockStore = createMockStoreWithDiscussion('discussion1', [duoNote]);
createComponent({
props: { note: { ...note, type, author: { ...note.author, user_type: userType } } },
props: { note: duoNote },
store: mockStore,
});
expect(wrapper.findByTestId('code-review-feedback').exists()).toBe(exists);
},
);
it('does not render if not first note in discussion', () => {
const duoNote = {
...note,
id: '9',
type: 'DiscussionNote',
discussion_id: 'discussion1',
author: {
...note.author,
user_type: 'duo_code_review_bot',
},
};
const mockStore = createMockStoreWithDiscussion('discussion1', [note, duoNote]);
createComponent({
props: { note: duoNote },
store: mockStore,
});
expect(wrapper.findByTestId('code-review-feedback').exists()).toBe(false);
});
});
describe('duo code review feedback text', () => {

View File

@ -5,13 +5,13 @@ import {
GlTruncate,
GlSearchBoxByType,
} from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { stubComponent } from 'helpers/stub_component';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import eventHub from '~/projects/new/event_hub';
import NewProjectUrlSelect from '~/projects/new/components/new_project_url_select.vue';
@ -76,7 +76,7 @@ describe('NewProjectUrlSelect component', () => {
search = '',
queryResponse = data,
provide = defaultProvide,
mountFn = shallowMount,
mountFn = shallowMountExtended,
} = {}) => {
mockQueryResponse = jest.fn().mockResolvedValue({ data: queryResponse });
const requestHandlers = [[searchQuery, mockQueryResponse]];
@ -102,7 +102,7 @@ describe('NewProjectUrlSelect component', () => {
const findButtonLabel = () => wrapper.findComponent(GlButton);
const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
const findSelectedPath = () => wrapper.findComponent(GlTruncate);
const findHiddenNamespaceInput = () => wrapper.find(`[name="${defaultProvide.inputName}`);
const findHiddenNamespaceInput = () => wrapper.findByTestId(defaultProvide.inputName);
const findAllListboxItems = () => wrapper.findAllComponents(GlListboxItem);
const findToggleButton = () => findDropdown().findComponent(GlButton);
@ -129,7 +129,7 @@ describe('NewProjectUrlSelect component', () => {
describe('when namespaceId is provided', () => {
beforeEach(() => {
wrapper = mountComponent({ mountFn: mount });
wrapper = mountComponent({ mountFn: mountExtended });
});
it('renders a dropdown with the given namespace full path as the text', () => {
@ -159,7 +159,7 @@ describe('NewProjectUrlSelect component', () => {
};
beforeEach(() => {
wrapper = mountComponent({ provide, mountFn: mount });
wrapper = mountComponent({ provide, mountFn: mountExtended });
});
it("renders a dropdown with the user's namespace full path as the text", () => {
@ -177,12 +177,12 @@ describe('NewProjectUrlSelect component', () => {
});
it('renders a hidden input with the selected namespace id', () => {
expect(findHiddenSelectedNamespaceInput().attributes('value')).toBe(undefined);
expect(Boolean(findHiddenSelectedNamespaceInput().attributes('value'))).toBe(false);
});
});
it('renders expected dropdown items', async () => {
wrapper = mountComponent({ mountFn: mount });
wrapper = mountComponent({ mountFn: mountExtended });
await showDropdown();
@ -201,7 +201,7 @@ describe('NewProjectUrlSelect component', () => {
it('does not render users section when user namespace id is not provided', async () => {
wrapper = mountComponent({
mountFn: mount,
mountFn: mountExtended,
provide: { ...defaultProvide, userNamespaceId: null },
});
@ -218,7 +218,7 @@ describe('NewProjectUrlSelect component', () => {
describe('query fetching', () => {
describe('on component mount', () => {
it('does not fetch query', () => {
wrapper = mountComponent({ mountFn: mount });
wrapper = mountComponent({ mountFn: mountExtended });
expect(mockQueryResponse).not.toHaveBeenCalled();
});
@ -226,7 +226,7 @@ describe('NewProjectUrlSelect component', () => {
describe('on dropdown shown', () => {
it('fetches query', async () => {
wrapper = mountComponent({ mountFn: mount });
wrapper = mountComponent({ mountFn: mountExtended });
await showDropdown();
@ -239,7 +239,7 @@ describe('NewProjectUrlSelect component', () => {
const { fullPath, id } = data.currentUser.groups.nodes[1];
beforeEach(async () => {
wrapper = mountComponent({ mountFn: mount });
wrapper = mountComponent({ mountFn: mountExtended });
// Show dropdown to fetch projects
await showDropdown();
@ -278,14 +278,14 @@ describe('NewProjectUrlSelect component', () => {
},
};
wrapper = mountComponent({ search: 'no matches', queryResponse, mountFn: mount });
wrapper = mountComponent({ search: 'no matches', queryResponse, mountFn: mountExtended });
await waitForPromises();
expect(wrapper.find('[data-testid="listbox-no-results-text"]').text()).toBe('No matches found');
});
it('emits `update-visibility` event to update the visibility radio options', async () => {
wrapper = mountComponent({ mountFn: mount });
wrapper = mountComponent({ mountFn: mountExtended });
const spy = jest.spyOn(eventHub, '$emit');
@ -305,7 +305,7 @@ describe('NewProjectUrlSelect component', () => {
});
it('updates hidden input with selected namespace', async () => {
wrapper = mountComponent({ mountFn: mount });
wrapper = mountComponent({ mountFn: mountExtended });
// Show dropdown to fetch projects
await showDropdown();

View File

@ -21,6 +21,7 @@ import {
workItemResponseFactory,
projectMembersAutocompleteResponseWithCurrentUser,
mockAssignees,
mockParticipants,
currentUserResponse,
currentUserNullResponse,
projectMembersAutocompleteResponseWithNoMatchingUsers,
@ -76,6 +77,7 @@ describe('WorkItemAssignees component', () => {
workItemId = 'gid://gitlab/WorkItem/1',
mountFn = shallowMountExtended,
assignees = mockAssignees,
participants = [],
searchQueryHandler = successSearchQueryHandler,
currentUserQueryHandler = successCurrentUserQueryHandler,
allowsMultipleAssignees = false,
@ -109,6 +111,7 @@ describe('WorkItemAssignees component', () => {
wrapper = mountFn(WorkItemAssignees, {
propsData: {
assignees,
participants,
fullPath,
workItemId,
allowsMultipleAssignees,
@ -134,6 +137,30 @@ describe('WorkItemAssignees component', () => {
});
describe('Dropdown search', () => {
it('includes participants in the results when there are matching results', async () => {
const expectedParticipant = {
...mockParticipants[0],
text: mockParticipants[0].name,
value: mockParticipants[0].id,
matcher: `${mockParticipants[0].name} ${mockParticipants[0].username}`,
};
createComponent({
participants: mockParticipants,
searchQueryHandler: successSearchWithNoMatchingUsers,
});
showDropdown();
await waitForPromises();
findSidebarDropdownWidget().vm.$emit('searchStarted', expectedParticipant.username);
await nextTick();
expect(findSidebarDropdownWidget().props('listItems')).toEqual([expectedParticipant]);
});
it('shows no items in the dropdown when no results matching', async () => {
createComponent({ searchQueryHandler: successSearchWithNoMatchingUsers });
showDropdown();

View File

@ -7,6 +7,7 @@ import VueRouter from 'vue-router';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import IssueCardStatistics from 'ee_else_ce/issues/list/components/issue_card_statistics.vue';
import IssueCardTimeInfo from 'ee_else_ce/issues/list/components/issue_card_time_info.vue';
import WorkItemBulkEditSidebar from '~/work_items/components/work_item_bulk_edit/work_item_bulk_edit_sidebar.vue';
import WorkItemHealthStatus from '~/work_items/components/work_item_health_status.vue';
import EmptyStateWithoutAnyIssues from '~/issues/list/components/empty_state_without_any_issues.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
@ -43,22 +44,31 @@ import {
TOKEN_TYPE_UPDATED,
} from '~/vue_shared/components/filtered_search_bar/constants';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import CreateWorkItemModal from '~/work_items/components/create_work_item_modal.vue';
import WorkItemsListApp from '~/work_items/pages/work_items_list_app.vue';
import { sortOptions, urlSortParams } from '~/work_items/pages/list/constants';
import getWorkItemStateCountsQuery from '~/work_items/graphql/list/get_work_item_state_counts.query.graphql';
import getWorkItemsQuery from '~/work_items/graphql/list/get_work_items.query.graphql';
import workItemBulkUpdateMutation from '~/work_items/graphql/list/work_item_bulk_update.mutation.graphql';
import workItemParentQuery from '~/work_items/graphql/list//work_item_parent.query.graphql';
import WorkItemDrawer from '~/work_items/components/work_item_drawer.vue';
import {
DETAIL_VIEW_QUERY_PARAM_NAME,
STATE_CLOSED,
WORK_ITEM_TYPE_ENUM_EPIC,
WORK_ITEM_TYPE_ENUM_ISSUE,
WORK_ITEM_TYPE_NAME_EPIC,
WORK_ITEM_TYPE_NAME_INCIDENT,
WORK_ITEM_TYPE_NAME_ISSUE,
WORK_ITEM_TYPE_NAME_KEY_RESULT,
WORK_ITEM_TYPE_NAME_OBJECTIVE,
WORK_ITEM_TYPE_NAME_TASK,
} from '~/work_items/constants';
import { createRouter } from '~/work_items/router';
import {
groupWorkItemsQueryResponse,
groupWorkItemStateCountsQueryResponse,
workItemParentQueryResponse,
} from '../../mock_data';
jest.mock('~/lib/utils/scroll_utils', () => ({ scrollUp: jest.fn() }));
@ -83,6 +93,8 @@ describeSkipVue3(skipReason, () => {
.fn()
.mockResolvedValue(groupWorkItemStateCountsQueryResponse);
const mutationHandler = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse);
const workItemParentQueryHandler = jest.fn().mockResolvedValue(workItemParentQueryResponse);
const workItemBulkUpdateHandler = jest.fn();
const findIssuableList = () => wrapper.findComponent(IssuableList);
const findIssueCardStatistics = () => wrapper.findComponent(IssueCardStatistics);
@ -90,6 +102,9 @@ describeSkipVue3(skipReason, () => {
const findWorkItemHealthStatus = () => wrapper.findComponent(WorkItemHealthStatus);
const findDrawer = () => wrapper.findComponent(WorkItemDrawer);
const findEmptyStateWithoutAnyIssues = () => wrapper.findComponent(EmptyStateWithoutAnyIssues);
const findCreateWorkItemModal = () => wrapper.findComponent(CreateWorkItemModal);
const findBulkEditStartButton = () => wrapper.find('[data-testid="bulk-edit-start-button"]');
const findBulkEditSidebar = () => wrapper.findComponent(WorkItemBulkEditSidebar);
const mountComponent = ({
provide = {},
@ -113,17 +128,26 @@ describeSkipVue3(skipReason, () => {
apolloProvider: createMockApollo([
[getWorkItemsQuery, queryHandler],
[getWorkItemStateCountsQuery, countsQueryHandler],
[workItemParentQuery, workItemParentQueryHandler],
[setSortPreferenceMutation, sortPreferenceMutationResponse],
[workItemBulkUpdateMutation, workItemBulkUpdateHandler],
]),
provide: {
glFeatures: {
okrsMvc: true,
},
autocompleteAwardEmojisPath: 'autocomplete/award/emojis/path',
canBulkUpdate: true,
canBulkEditEpics: true,
fullPath: 'full/path',
hasEpicsFeature: false,
hasGroupBulkEditFeature: true,
hasOkrsFeature: false,
hasQualityManagementFeature: false,
initialSort: CREATED_DESC,
isGroup: true,
isSignedIn: true,
showNewWorkItem: true,
workItemType: null,
hasIssueDateFilterFeature: false,
timeTrackingLimitToHours: false,
@ -875,4 +899,148 @@ describeSkipVue3(skipReason, () => {
expect(tokenTypes).toEqual(expect.arrayContaining([TOKEN_TYPE_CLOSED, TOKEN_TYPE_CREATED]));
});
});
describe('CreateWorkItem modal', () => {
it.each([true, false])('renders depending on showNewWorkItem=%s', async (showNewWorkItem) => {
mountComponent({ provide: { showNewWorkItem } });
await waitForPromises();
expect(findCreateWorkItemModal().exists()).toBe(showNewWorkItem);
});
describe('allowedWorkItemTypes', () => {
it('returns empty array when group', async () => {
mountComponent({ provide: { isGroup: true } });
await waitForPromises();
expect(findCreateWorkItemModal().props('allowedWorkItemTypes')).toEqual([]);
});
it('returns project-level types when project', async () => {
mountComponent({ provide: { isGroup: false } });
await waitForPromises();
expect(findCreateWorkItemModal().props('allowedWorkItemTypes')).toEqual([
WORK_ITEM_TYPE_NAME_INCIDENT,
WORK_ITEM_TYPE_NAME_ISSUE,
WORK_ITEM_TYPE_NAME_TASK,
]);
});
it('returns project-level types including okr types when project and when okrs is enabled', async () => {
mountComponent({ provide: { isGroup: false, hasOkrsFeature: true } });
await waitForPromises();
expect(findCreateWorkItemModal().props('allowedWorkItemTypes')).toEqual([
WORK_ITEM_TYPE_NAME_INCIDENT,
WORK_ITEM_TYPE_NAME_ISSUE,
WORK_ITEM_TYPE_NAME_TASK,
WORK_ITEM_TYPE_NAME_KEY_RESULT,
WORK_ITEM_TYPE_NAME_OBJECTIVE,
]);
});
});
describe('alwaysShowWorkItemTypeSelect', () => {
it.each`
workItemType | value
${WORK_ITEM_TYPE_NAME_ISSUE} | ${true}
${WORK_ITEM_TYPE_NAME_EPIC} | ${false}
`('renders=$value when workItemType=$workItemType', async ({ workItemType, value }) => {
mountComponent({ provide: { workItemType } });
await waitForPromises();
expect(findCreateWorkItemModal().props('alwaysShowWorkItemTypeSelect')).toBe(value);
});
});
describe('preselectedWorkItemType', () => {
it.each`
workItemType | value
${WORK_ITEM_TYPE_NAME_ISSUE} | ${WORK_ITEM_TYPE_ENUM_ISSUE}
${WORK_ITEM_TYPE_NAME_EPIC} | ${WORK_ITEM_TYPE_ENUM_EPIC}
`('renders=$value when workItemType=$workItemType', async ({ workItemType, value }) => {
mountComponent({ provide: { workItemType } });
await waitForPromises();
expect(findCreateWorkItemModal().props('preselectedWorkItemType')).toBe(value);
});
});
});
describe('when bulk editing', () => {
describe('when workItemType=Epic', () => {
it.each([true, false])('renders=$s when canBulkEditEpics=%s', async (canBulkEditEpics) => {
mountComponent({ provide: { canBulkEditEpics, workItemType: WORK_ITEM_TYPE_NAME_EPIC } });
await waitForPromises();
expect(findBulkEditStartButton().exists()).toBe(canBulkEditEpics);
});
});
describe('when group', () => {
it.each`
canBulkUpdate | hasGroupBulkEditFeature | renders
${true} | ${true} | ${true}
${true} | ${false} | ${false}
${false} | ${true} | ${false}
${false} | ${false} | ${false}
`(
'renders=$renders when canBulkUpdate=$canBulkUpdate and hasGroupBulkEditFeature=$hasGroupBulkEditFeature',
async ({ canBulkUpdate, hasGroupBulkEditFeature, renders }) => {
mountComponent({ provide: { isGroup: true, canBulkUpdate, hasGroupBulkEditFeature } });
await waitForPromises();
expect(findBulkEditStartButton().exists()).toBe(renders);
},
);
});
describe('when project', () => {
it.each([true, false])('renders depending on canBulkUpdate=%s', async (canBulkUpdate) => {
mountComponent({ provide: { isGroup: false, canBulkUpdate } });
await waitForPromises();
expect(findBulkEditStartButton().exists()).toBe(canBulkUpdate);
});
});
it('opens the bulk update sidebar when the toggle is clicked', async () => {
mountComponent({ provide: { isGroup: false, canBulkUpdate: true } });
await waitForPromises();
findBulkEditStartButton().vm.$emit('click');
await nextTick();
expect(findIssuableList().props('showBulkEditSidebar')).toBe(true);
});
it('triggers the bulk edit mutation when bulk edit is submitted', async () => {
mountComponent({ provide: { isGroup: false, canBulkUpdate: true } });
await waitForPromises();
const ids = ['gid://gitlab/WorkItem/1', 'gid://gitlab/WorkItem/2'];
const addLabelIds = ['gid://gitlab/Label/1', 'gid://gitlab/Label/2', 'gid://gitlab/Label/3'];
const removeLabelIds = [
'gid://gitlab/Label/4',
'gid://gitlab/Label/5',
'gid://gitlab/Label/6',
];
findBulkEditStartButton().vm.$emit('click');
await waitForPromises();
findBulkEditSidebar().vm.$emit('bulk-update', { ids, addLabelIds, removeLabelIds });
expect(workItemBulkUpdateHandler).toHaveBeenCalledWith({
input: {
parentId: workItemParentQueryResponse.data.namespace.id,
ids,
labelsWidget: {
addLabelIds,
removeLabelIds,
},
},
});
});
});
});

View File

@ -32,6 +32,27 @@ export const mockAssignees = [
},
];
// Builds a minimal UserCore stub for participant-related specs; only the
// fields the assignees widget reads are populated.
const buildMockParticipant = ({ id, webPath, name, username }) => ({
  __typename: 'UserCore',
  id,
  avatarUrl: '',
  webUrl: '',
  webPath,
  name,
  username,
});

export const mockParticipants = [
  buildMockParticipant({
    id: 'gid://gitlab/User/3',
    webPath: '/l_zadeh',
    name: 'Lotfi Zadeh',
    username: 'l_zadeh',
  }),
  buildMockParticipant({
    id: 'gid://gitlab/User/4',
    webPath: '/fbar',
    name: 'Foo Bar',
    username: 'fbar',
  }),
];
export const mockLabels = [
{
__typename: 'Label',
@ -6459,3 +6480,11 @@ export const errorTrackingQueryResponseWithStackTrace = getErrorTrackingQueryRes
});
export const mockRolledUpHealthStatus = [];
// Minimal mock for workItemParentQuery — specs only consume the namespace id.
export const workItemParentQueryResponse = {
  data: {
    namespace: { id: 'gid://gitlab/Group/1' },
  },
};

View File

@ -108,6 +108,7 @@ RSpec.describe BoardsHelper do
allow(helper).to receive(:can?).with(user, :create_saved_replies, project).and_return(false)
allow(helper).to receive(:can?).with(user, :create_work_item, project.group).and_return(false)
allow(helper).to receive(:can?).with(user, :bulk_admin_epic, project).and_return(false)
allow(helper).to receive(:can?).with(user, :admin_issue, project).and_return(false)
allow(helper).to receive(:can?).with(user, :create_projects, project.group).and_return(false)
end
@ -167,6 +168,7 @@ RSpec.describe BoardsHelper do
allow(helper).to receive(:can?).with(user, :create_saved_replies, base_group).and_return(false)
allow(helper).to receive(:can?).with(user, :create_work_item, base_group).and_return(false)
allow(helper).to receive(:can?).with(user, :bulk_admin_epic, base_group).and_return(false)
allow(helper).to receive(:can?).with(user, :admin_issue, base_group).and_return(false)
allow(helper).to receive(:can?).with(user, :create_projects, base_group).and_return(false)
end

View File

@ -19,6 +19,7 @@ RSpec.describe WorkItemsHelper, feature_category: :team_planning do
{
autocomplete_award_emojis_path: autocomplete_award_emojis_path,
can_admin_label: 'true',
can_bulk_update: 'true',
full_path: project.full_path,
group_path: nil,
issues_list_path: project_issues_path(project),
@ -44,7 +45,7 @@ RSpec.describe WorkItemsHelper, feature_category: :team_planning do
expect(helper.work_items_data(group_project, current_user)).to include(
{
group_path: group_project.group.full_path,
show_new_issue_link: 'true'
show_new_work_item: 'true'
}
)
end

View File

@ -9,6 +9,27 @@ RSpec.describe Authn::Tokens::PersonalAccessToken, feature_category: :system_acc
subject(:token) { described_class.new(plaintext, :group_token_revocation_service) }
describe '.prefix?' do
let_it_be(:token_lookalike) { 'glpat-1234abcd' }
it 'returns true if token starts with a prefix' do
expect(described_class.prefix?(token_lookalike)).to be_truthy
end
it 'is true and falls back to the default token prefix if token setting is nil' do
stub_application_setting(personal_access_token_prefix: nil)
expect(described_class.prefix?(token_lookalike)).to be_truthy
end
# An empty string sent to `start_with?` would cause every value to be true
it 'is false if token setting is an empty string' do
stub_application_setting(personal_access_token_prefix: '')
expect(described_class.prefix?('non-token')).to be_falsey
end
end
context 'with valid personal access token' do
let(:plaintext) { personal_access_token.token }
let(:valid_revocable) { personal_access_token }

View File

@ -0,0 +1,170 @@
# frozen_string_literal: true
require 'spec_helper'
# Exercises the backfill job that copies namespace/project sharding keys (and
# optional bigint mirror columns) from protected_branches onto
# protected_branch_push_access_levels.
RSpec.describe Gitlab::BackgroundMigration::BackfillProtectedBranchPushAccessLevelsFields, feature_category: :source_code_management do
  let(:connection) { ApplicationRecord.connection }

  # Instance under test, batching over the table being backfilled with a
  # tiny sub-batch so batching logic is exercised.
  let(:migration) do
    described_class.new(
      batch_table: :protected_branch_push_access_levels,
      batch_column: :id,
      sub_batch_size: 2,
      pause_ms: 0,
      connection: connection
    )
  end

  # Bare table helpers so the spec does not depend on application models.
  shared_context 'for database tables' do
    let(:namespaces) { table(:namespaces) }
    let(:organizations) { table(:organizations) }
    let(:projects) { table(:projects) }
    let(:users) { table(:users) }
    let(:groups) { table(:namespaces) }
    let(:protected_branches) { table(:protected_branches) { |t| t.primary_key = :id } }

    let(:protected_branch_push_access_levels) do
      table(:protected_branch_push_access_levels) do |t|
        t.primary_key = :id
      end
    end
  end

  shared_context 'for organization' do
    let(:organization) { organizations.create!(name: 'organization', path: 'organization') }
  end

  shared_context 'for namespaces' do
    let(:namespace1) { namespaces.create!(name: 'namespace 1', path: 'namespace1', organization_id: organization.id) }
    let(:namespace2) { namespaces.create!(name: 'namespace 2', path: 'namespace2', organization_id: organization.id) }
  end

  shared_context 'for projects' do
    let(:project1) do
      projects.create!(
        name: 'project 1',
        path: 'project1',
        namespace_id: namespace1.id,
        project_namespace_id: namespace1.id,
        organization_id: organization.id
      )
    end

    let(:project2) do
      projects.create!(
        name: 'project 2',
        path: 'project2',
        namespace_id: namespace2.id,
        project_namespace_id: namespace2.id,
        organization_id: organization.id
      )
    end
  end

  shared_context 'for users and groups' do
    let(:user) do
      users.create!(
        email: 'test@example.com',
        username: 'test_user',
        projects_limit: 10
      )
    end

    let(:group) do
      groups.create!(
        name: 'test-group',
        path: 'test-group',
        type: 'Group',
        organization_id: organization.id
      )
    end
  end

  # `let!` so the branches exist before the migration runs.
  shared_context 'for protected branches' do
    let!(:protected_branch1) do
      protected_branches.create!(
        name: 'master',
        namespace_id: namespace1.id
      )
    end

    let!(:protected_branch2) do
      protected_branches.create!(
        name: 'main',
        namespace_id: namespace2.id
      )
    end
  end

  # Row created with NULL sharding keys so the migration has work to do.
  shared_context 'for protected branch push access levels' do
    let!(:protected_branch_push_access_level_1) do
      protected_branch_push_access_levels.create!(
        protected_branch_id: protected_branch1.id,
        access_level: 0,
        user_id: user.id,
        protected_branch_namespace_id: nil,
        protected_branch_project_id: nil
      )
    end
  end

  include_context 'for database tables'
  include_context 'for organization'
  include_context 'for namespaces'
  include_context 'for projects'
  include_context 'for users and groups'
  include_context 'for protected branches'
  include_context 'for protected branch push access levels'

  describe '#perform' do
    context 'when backfilling all fields' do
      it 'backfills the bigint and association fields correctly' do
        migration.perform

        expect(protected_branch_push_access_level_1.reload.protected_branch_project_id)
          .to eq(protected_branch1.project_id)
        expect(protected_branch_push_access_level_1.reload.protected_branch_namespace_id)
          .to eq(protected_branch1.namespace_id)
      end
    end

    context 'when doing filtering' do
      # Asserts the UPDATE is scoped to the sub-batch via the CTE join.
      it 'includes the sub-batch filter in the update SQL' do
        expect(connection).to receive(:execute) do |sql|
          expect(sql).to include("WHERE protected_branch_push_access_levels.id = filtered_relation.id")
        end

        migration.perform
      end
    end
  end

  describe "#bigint_column_assignments" do
    subject(:assignment_string) { migration.send(:bigint_column_assignments) }

    context "when some expected bigint columns are present" do
      before do
        # Stub the live column lookup so the spec is independent of whether
        # the bigint conversion columns exist in the current schema.
        allow(migration).to receive(:all_column_names).and_return(
          %w[id_convert_to_bigint protected_branch_id_convert_to_bigint]
        )
      end

      it "returns the expected assignment string" do
        expected = <<~EXPECTED.strip
          ,\n"id_convert_to_bigint" = filtered_relation."id",\n"protected_branch_id_convert_to_bigint" = filtered_relation."protected_branch_id"
        EXPECTED

        expect(assignment_string).to eq(expected)
      end
    end

    context "when none of the expected bigint columns are present" do
      before do
        allow(migration).to receive(:all_column_names).and_return([])
      end

      it "returns an empty string" do
        expect(assignment_string).to eq('')
      end
    end
  end
end

View File

@ -149,8 +149,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
end
describe '#refresh_if_necessary' do
let(:address_foo) { described_class::Address.new('foo') }
let(:address_bar) { described_class::Address.new('bar') }
let(:address_foo) { described_class::Address.new('foo', nil) }
let(:address_bar) { described_class::Address.new('bar', nil) }
context 'when a refresh is necessary' do
before do
@ -199,20 +199,34 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
.and_return(load_balancer)
end
let(:address_foo) { described_class::Address.new('foo') }
let(:address_bar) { described_class::Address.new('bar') }
let(:address_foo) { described_class::Address.new('foo', nil) }
let(:address_bar) { described_class::Address.new('bar', nil) }
let(:load_balancer) do
Gitlab::Database::LoadBalancing::LoadBalancer.new(
Gitlab::Database::LoadBalancing::Configuration
.new(ActiveRecord::Base, [address_foo])
.new(ActiveRecord::Base, [address_foo.hostname])
)
end
# After replacement only the new address ('bar', default port) should remain.
it 'replaces the hosts of the load balancer' do
service.replace_hosts([address_bar])
# NOTE(review): the eq and match_array expectations below assert the same
# thing; one looks like leftover from a merge — confirm which should remain.
expect(load_balancer.host_list.host_names_and_ports).to eq([['bar', nil]])
expect(load_balancer.host_list.host_names_and_ports).to match_array([['bar', nil]])
end
# When an incoming address matches an existing host's hostname and port,
# replace_hosts should keep the existing Host object rather than rebuild it.
it 'reuses existing hosts when hostname and port are unchanged' do
old_host1 = Gitlab::Database::LoadBalancing::Host.new(address_foo.hostname, load_balancer, port: address_foo.port)
old_host2 = Gitlab::Database::LoadBalancing::Host.new(address_bar.hostname, load_balancer, port: address_bar.port)
allow(load_balancer.host_list)
.to receive(:hosts)
.and_return([old_host1, old_host2])
# Only the host that is no longer present in the new address list should
# be disconnected.
expect(service).to receive(:disconnect_old_hosts).with([old_host2])
service.replace_hosts([address_foo])
# NOTE(review): host_list.hosts is stubbed above, so this size check reads
# the stub rather than post-replacement state — confirm intent.
expect(load_balancer.host_list.hosts.size).to eq(2)
end
it 'disconnects the old connections gracefully if possible' do
@ -256,6 +270,32 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
service.replace_hosts([address_foo, address_bar])
end
end
# Flag routing: with the flag enabled, replace_hosts delegates to the new
# implementation path.
context "when replace_hosts_enabled? is true" do
before do
allow(service).to receive(:replace_hosts_enabled?).and_return(true)
# Keep real behavior so the delegation can be observed without stubbing it out.
allow(service).to receive(:new_replace_hosts).and_call_original
end
it 'calls new_replace_hosts' do
expect(service).to receive(:new_replace_hosts).with([address_bar])
service.replace_hosts([address_bar])
end
end
# Flag routing: with the flag disabled, replace_hosts falls back to the legacy
# implementation path.
context "when replace_hosts_enabled? is false" do
before do
allow(service).to receive(:replace_hosts_enabled?).and_return(false)
# Keep real behavior so the delegation can be observed without stubbing it out.
allow(service).to receive(:old_replace_hosts).and_call_original
end
it 'calls old_replace_hosts' do
expect(service).to receive(:old_replace_hosts).with([address_bar])
service.replace_hosts([address_bar])
end
end
end
describe '#addresses_from_dns' do
@ -289,8 +329,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
it 'returns a TTL and ordered list of IP addresses' do
addresses = [
described_class::Address.new('127.0.0.1'),
described_class::Address.new('255.255.255.0')
described_class::Address.new('127.0.0.1', nil),
described_class::Address.new('255.255.255.0', nil)
]
expect(service.addresses_from_dns).to eq([90, addresses])
@ -389,8 +429,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
it 'returns the ordered host names of the load balancer' do
addresses = [
described_class::Address.new('a'),
described_class::Address.new('b')
described_class::Address.new('a', nil),
described_class::Address.new('b', nil)
]
expect(service.addresses_from_load_balancer).to eq(addresses)

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Verifies that the post-deploy migration enqueues the batched background
# migration backfilling protected_branch_push_access_levels fields, and that
# rolling back removes the scheduled job again.
RSpec.describe QueueBackfillProtectedBranchPushAccessLevelsFields, feature_category: :source_code_management do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      # Nothing is scheduled before the migration runs (and after rollback).
      migration.before(-> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      })

      # After migrating, the job is enqueued with the class-defined batching
      # configuration and no extra job arguments.
      migration.after(-> {
        expect(batched_migration).to have_scheduled_batched_migration(
          gitlab_schema: :gitlab_main_cell,
          table_name: :protected_branch_push_access_levels,
          column_name: :id,
          job_arguments: [],
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      })
    end
  end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Verifies that the migration enqueues the batched background migration fixing
# string config hashes on group-level external streaming destinations, and
# that rollback removes it.
RSpec.describe QueueFixStringConfigHashesGroupStreamingDestinations, migration: :gitlab_main, feature_category: :audit_events do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      # Nothing is scheduled before the migration runs (and after rollback).
      migration.before(-> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      })

      # After migrating, the job targets the group streaming destinations
      # table with the class-defined batch sizes.
      migration.after(-> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :audit_events_group_external_streaming_destinations,
          column_name: :id,
          gitlab_schema: :gitlab_main,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      })
    end
  end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Verifies that the migration enqueues the batched background migration fixing
# string config hashes on instance-level external streaming destinations, and
# that rollback removes it.
RSpec.describe QueueFixStringConfigHashesInstanceStreamingDestinations, migration: :gitlab_main, feature_category: :audit_events do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      # Nothing is scheduled before the migration runs (and after rollback).
      migration.before(-> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      })

      # After migrating, the job targets the instance streaming destinations
      # table with the class-defined batch sizes.
      migration.after(-> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :audit_events_instance_external_streaming_destinations,
          column_name: :id,
          gitlab_schema: :gitlab_main,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      })
    end
  end
end

View File

@ -50,11 +50,19 @@ RSpec.describe RuboCop::Cop::Migration::ChangeColumnNullOnHighTrafficTable, feat
'For more details check https://docs.gitlab.com/ee/development/database/not_null_constraints.html#not-null-constraints-on-large-tables'
end
it 'registers an offense' do
# Tightening a column to NOT NULL on a high-traffic table must be flagged.
it 'registers an offense when setting NOT NULL' do
# NOTE(review): the fixture below contains both a two-argument and a
# three-argument change_column_null call, each underlined — the two-argument
# pair looks like leftover from a previous revision; confirm only the
# explicit `false` form is intended.
expect_offense(<<~RUBY)
def up
change_column_null :vulnerabilities, :name
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
change_column_null :vulnerabilities, :name, false
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
end
RUBY
end
# Passing `true` relaxes the constraint (drops NOT NULL), which is safe on
# high-traffic tables, so the cop must stay silent.
it 'does not register an offense when removing NOT NULL' do
expect_no_offenses(<<~RUBY)
def up
change_column_null :vulnerabilities, :name, true
end
RUBY
end