Mirror of https://github.com/gitlabhq/gitlabhq.git (synced 2025-07-23 00:45:28 +00:00)

Commit: Add latest changes from gitlab-org/gitlab@master
@@ -199,7 +199,6 @@ export default {
  'app/assets/javascripts/sidebar/components/sidebar_dropdown.vue',
  'app/assets/javascripts/sidebar/components/status/status_dropdown.vue',
  'app/assets/javascripts/sidebar/components/subscriptions/subscriptions.vue',
  'app/assets/javascripts/stars/components/star_count.vue',
  'app/assets/javascripts/tags/components/delete_tag_modal.vue',
  'app/assets/javascripts/token_access/components/outbound_token_access.vue',
  'app/assets/javascripts/token_access/components/token_permissions.vue',
@@ -2420,4 +2420,4 @@ DEPENDENCIES
  yard (~> 0.9)

BUNDLED WITH
-   2.6.5
+   2.7.1
@@ -2415,4 +2415,4 @@ DEPENDENCIES
  yard (~> 0.9)

BUNDLED WITH
-   2.6.5
+   2.7.1
app/assets/javascripts/lib/utils/touch_detection.js (new file, 13 lines)
@@ -0,0 +1,13 @@
/**
 * Detects if the current device has touch capability
 *
 * @returns {boolean} True if device has touch capability, false otherwise
 */
export function hasTouchCapability() {
  return Boolean(
    'ontouchstart' in window ||
      navigator.maxTouchPoints > 0 ||
      navigator.msMaxTouchPoints > 0 ||
      (typeof window.DocumentTouch !== 'undefined' && document instanceof window.DocumentTouch),
  );
}
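The components changed later in this diff use this helper to decide whether to bind a tooltip at all. A minimal usage sketch of that pattern (not part of the commit; `tooltipTitleFor` is a hypothetical helper name):

```javascript
import { hasTouchCapability } from '~/lib/utils/touch_detection';

// Sketch only: return a tooltip title for pointer-based devices, or null on
// touch devices. The sidebar components in this commit pass the null value to
// the tooltip directive to suppress the tooltip.
export function tooltipTitleFor(title) {
  return hasTouchCapability() ? null : title;
}
```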
@@ -36,7 +36,6 @@ export default {
  },
  data() {
    return {
      isLoading: false,
      count: this.starCount,
      isStarred: this.starred,
    };
@@ -2,6 +2,7 @@
import { GlTooltipDirective } from '@gitlab/ui';
import { __ } from '~/locale';
import SafeHtml from '~/vue_shared/directives/safe_html';
+import { hasTouchCapability } from '~/lib/utils/touch_detection';
import logo from '../../../../views/shared/_logo.svg?raw';

export default {
@@ -21,12 +22,17 @@ export default {
      default: '',
    },
  },
+  computed: {
+    homepageTooltip() {
+      return hasTouchCapability() ? null : this.$options.i18n.homepage;
+    },
+  },
};
</script>

<template>
  <a
-    v-gl-tooltip:super-sidebar.right="$options.i18n.homepage"
+    v-gl-tooltip:super-sidebar.right="homepageTooltip"
    class="brand-logo gl-inline-block gl-rounded-base gl-border-none gl-bg-transparent gl-p-2 focus:gl-focus active:gl-focus"
    :href="rootPath"
    data-track-action="click_link"
@@ -2,6 +2,7 @@
import { GlButton, GlTooltipDirective } from '@gitlab/ui';
import { __ } from '~/locale';
import Tracking from '~/tracking';
+import { hasTouchCapability } from '~/lib/utils/touch_detection';
import { JS_TOGGLE_EXPAND_CLASS, JS_TOGGLE_COLLAPSE_CLASS, sidebarState } from '../constants';
import { toggleSuperSidebarCollapsed } from '../super_sidebar_collapsed_state_manager';

@@ -43,6 +44,10 @@ export default {
      return this.type === 'expand';
    },
    tooltip() {
+      if (hasTouchCapability()) {
+        return null;
+      }
+
      return this.isTypeExpand ? this.$options.tooltipExpand : this.$options.tooltipCollapse;
    },
    ariaExpanded() {
@@ -29,7 +29,6 @@ module WorkItemsHelper
      project_namespace_full_path:
        resource_parent.is_a?(Project) ? resource_parent.namespace.full_path : resource_parent.full_path,
      group_id: group&.id,
      has_issue_date_filter_feature: has_issue_date_filter_feature?(resource_parent, current_user).to_s,
      time_tracking_limit_to_hours: Gitlab::CurrentSettings.time_tracking_limit_to_hours.to_s
    }
  end
@@ -1,5 +0,0 @@ (file deleted)
# frozen_string_literal: true

files = git.added_files + git.modified_files

finalize_batched_background_migration_validator.validate_migrations(files)
@@ -1,10 +0,0 @@ (file deleted)
# frozen_string_literal: true

require_relative '../../tooling/danger/finalize_batched_background_migration_validator_helper'

module Danger
  class FinalizeBatchedBackgroundMigrationValidator < ::Danger::Plugin
    # Put the helper code somewhere it can be tested
    include Tooling::Danger::FinalizeBatchedBackgroundMigrationValidatorHelper
  end
end
@@ -0,0 +1,9 @@ (new file)
---
migration_job_name: BackfillResourceLabelEventsNamespaceIdAndCleanInvalid
description: Backfill sharding key based on issue, merge_request or epic namespace. Also remove `issue_id` from records
  that have both `issue_id` and `epic_id` set.
feature_category: team_planning
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/197676
milestone: '18.3'
queued_migration_version: 20250714214345
finalized_by: # version of the migration that finalized this BBM
@@ -8,6 +8,6 @@ description: Information linking labels with target objects that can be labelled
  such as issues, MRs and epics
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/03654a6abf47c88b8b980a6707874ff78080d2fe
milestone: '7.2'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/514599
table_size: medium
@@ -7,6 +7,6 @@ feature_categories:
description: Records the addition and removal of issues to iterations
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37617
milestone: '13.3'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/514595
table_size: small
@@ -8,6 +8,6 @@ description: Records the addition and removal of labels from resources that can
  labelled; such as issues, MRs and epics
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/6697
milestone: '11.2'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/514594
table_size: over_limit
@@ -7,6 +7,6 @@ feature_categories:
description: Records the change of state of issues between opened and closed
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28926
milestone: '13.0'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/514592
table_size: medium
@@ -8,6 +8,6 @@ description: Information related to sent email notifications that supports reply
  functionality
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1173
milestone: '8.0'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/514591
table_size: over_limit
@@ -7,6 +7,6 @@ feature_categories:
description: Used to store notes metadata
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/1c3c7fb25d972fc19d5b4bb371cb21094d81e478
milestone: '9.1'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/514589
table_size: over_limit
@@ -0,0 +1,26 @@ (new file)
# frozen_string_literal: true

class QueueBackfillResourceLabelEventsNamespaceIdAndCleanInvalid < Gitlab::Database::Migration[2.3]
  milestone '18.3'
  restrict_gitlab_migration gitlab_schema: :gitlab_main

  MIGRATION = "BackfillResourceLabelEventsNamespaceIdAndCleanInvalid"
  MAX_BATCH_SIZE = 50_000
  BATCH_SIZE = 30_000
  SUB_BATCH_SIZE = 200

  def up
    queue_batched_background_migration(
      MIGRATION,
      :resource_label_events,
      :id,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE,
      max_batch_size: MAX_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(MIGRATION, :resource_label_events, :id, [])
  end
end
db/schema_migrations/20250714214345 (new file, 1 line)
@@ -0,0 +1 @@
192e0e719944cd4e6eb4d2624e3592ee54f1e59f333784757bd44ed765bbdf0d
@@ -44,7 +44,7 @@ An auditor user counts as a billable user and consumes a license seat.
To create a new auditor user:

1. On the left sidebar, at the bottom, select **Admin**.
-1. Select **Overview > Users**.
+1. Select **Overview** > **Users**.
1. Select **New user**.
1. In the **Account** section, enter the required account information.
1. For **User type**, select **Auditor**.
@@ -22,7 +22,7 @@ You can change the [Default first day of the week](../../user/profile/preference
for the entire GitLab instance:

1. On the left sidebar, at the bottom, select **Admin**.
-1. Select **Settings > Preferences**.
+1. Select **Settings** > **Preferences**.
1. Scroll to the **Localization** section, and select your desired first day of the week.

## Change the default language
@@ -31,5 +31,5 @@ You can change the [Default language](../../user/profile/preferences.md)
for the entire GitLab instance:

1. On the left sidebar, at the bottom, select **Admin**.
-1. Select **Settings > Preferences**.
+1. Select **Settings** > **Preferences**.
1. Scroll to the **Localization** section, and select your desired default language.
@@ -96,7 +96,7 @@ operation of the pipeline.
To execute a pipeline manually:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Build > Pipelines**.
+1. Select **Build** > **Pipelines**.
1. Select **New pipeline**.
1. In the **Run for branch name or tag** field, select the branch or tag to run the pipeline for.
1. (Optional) Enter any:
@@ -251,7 +251,7 @@ The `ci.skip` push option does not skip merge request pipelines.
Users with the Owner role for a project can delete a pipeline:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Build > Pipelines**.
+1. Select **Build** > **Pipelines**.
1. Select either the pipeline ID (for example `#123456789`) or the pipeline status icon
   (for example **Passed**) of the pipeline to delete.
1. In the top right of the pipeline details page, select **Delete**.
@@ -326,7 +326,7 @@ Prerequisites:
To trigger the pipeline when the upstream project is rebuilt:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Settings > CI/CD**.
+1. Select **Settings** > **CI/CD**.
1. Expand **Pipeline subscriptions**.
1. Select **Add project**.
1. Enter the project you want to subscribe to, in the format `<namespace>/<project>`.
@@ -391,7 +391,7 @@ running time is:
To view all the pipelines that ran for your project:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Build > Pipelines**.
+1. Select **Build** > **Pipelines**.

You can filter the **Pipelines** page by:
@@ -1484,13 +1484,14 @@ background migration.

```ruby
class FinalizeBackfillRouteNamespaceId < Gitlab::Database::Migration[2.1]
+  MIGRATION = 'BackfillRouteNamespaceId'
  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_main

  def up
    ensure_batched_background_migration_is_finished(
-      job_class_name: 'BackfillRouteNamespaceId',
+      job_class_name: MIGRATION,
      table_name: :routes,
      column_name: :id,
      job_arguments: [],
@@ -84,7 +84,7 @@ To help detect a potential secret leak, you can use the
To view the deploy keys available to a project:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Settings > Repository**.
+1. Select **Settings** > **Repository**.
1. Expand **Deploy keys**.

The deploy keys available are listed:
@@ -104,7 +104,7 @@ Prerequisites:
   key on the host that requires access to the repository.

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Settings > Repository**.
+1. Select **Settings** > **Repository**.
1. Expand **Deploy keys**.
1. Select **Add new key**.
1. Complete the fields.
@@ -152,7 +152,7 @@ Prerequisites:
To grant a public deploy key access to a project:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Settings > Repository**.
+1. Select **Settings** > **Repository**.
1. Expand **Deploy keys**.
1. Select **Publicly accessible deploy keys**.
1. In the key's row, select **Enable**.
@@ -169,7 +169,7 @@ Prerequisites:
To edit the project access permissions of a deploy key:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Settings > Repository**.
+1. Select **Settings** > **Repository**.
1. Expand **Deploy keys**.
1. In the key's row, select **Edit** ({{< icon name="pencil" >}}).
1. Select or clear the **Grant write permissions to this key** checkbox.
@@ -186,7 +186,7 @@ Prerequisites:
To disable a deploy key:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Settings > Repository**.
+1. Select **Settings** > **Repository**.
1. Expand **Deploy keys**.
1. Select **Disable** ({{< icon name="cancel" >}}).
@@ -85,7 +85,7 @@ To view a list of licenses that detected for your project's dependencies,
configure [License Compliance](../../compliance/license_scanning_of_cyclonedx_files/_index.md)
for your project.

-
+

## External status checks
@@ -0,0 +1,96 @@ (new file)
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    class BackfillResourceLabelEventsNamespaceIdAndCleanInvalid < BatchedMigrationJob
      operation_name :set_namespace_id
      feature_category :team_planning

      def perform
        each_sub_batch do |sub_batch|
          clean_epic_records(sub_batch)
          update_issue_records(sub_batch)
          update_merge_request_records(sub_batch)
          update_epic_records(sub_batch)
        end
      end

      private

      def query_prefix(sub_batch, where_clause)
        <<~SQL
          WITH relation AS (
            #{sub_batch.limit(sub_batch_size).to_sql}
          ),
          filtered_relation AS (
            SELECT * FROM relation WHERE #{where_clause} LIMIT #{sub_batch_size}
          )
        SQL
      end

      def clean_epic_records(sub_batch)
        connection.execute(
          <<~SQL
            #{query_prefix(sub_batch, '(num_nonnulls(epic_id, issue_id) = 2)')}
            UPDATE "resource_label_events"
            SET
              "issue_id" = NULL
            FROM
              "filtered_relation"
            WHERE
              "resource_label_events"."id" = "filtered_relation"."id"
          SQL
        )
      end

      def update_issue_records(sub_batch)
        connection.execute(
          <<~SQL
            #{query_prefix(sub_batch, 'issue_id IS NOT NULL')}
            UPDATE "resource_label_events"
            SET
              "namespace_id" = "issues"."namespace_id"
            FROM
              "filtered_relation"
              INNER JOIN "issues" ON "filtered_relation"."issue_id" = "issues"."id"
            WHERE
              "resource_label_events"."id" = "filtered_relation"."id"
          SQL
        )
      end

      def update_merge_request_records(sub_batch)
        connection.execute(
          <<~SQL
            #{query_prefix(sub_batch, 'merge_request_id IS NOT NULL')}
            UPDATE "resource_label_events"
            SET
              "namespace_id" = "projects"."project_namespace_id"
            FROM
              filtered_relation
              INNER JOIN merge_requests ON filtered_relation.merge_request_id = merge_requests.id
              INNER JOIN projects ON projects.id = merge_requests.target_project_id
            WHERE
              "resource_label_events"."id" = "filtered_relation"."id"
          SQL
        )
      end

      def update_epic_records(sub_batch)
        connection.execute(
          <<~SQL
            #{query_prefix(sub_batch, 'epic_id IS NOT NULL')}
            UPDATE "resource_label_events"
            SET
              "namespace_id" = "epics"."group_id"
            FROM
              filtered_relation
              INNER JOIN epics ON filtered_relation.epic_id = epics.id
            WHERE
              "resource_label_events"."id" = "filtered_relation"."id"
          SQL
        )
      end
    end
  end
end
@@ -301,7 +301,7 @@ module Gitlab

      def protected_ref?
        strong_memoize(:protected_ref) do
-          project.protected_for?(pipeline.jobs_git_ref)
+          pipeline.protected_ref?
        end
      end

spec/frontend/lib/utils/touch_detection_spec.js (new file, 101 lines)
@@ -0,0 +1,101 @@
import { hasTouchCapability } from '~/lib/utils/touch_detection';

describe('Touch Detection Utility', () => {
  let originalOntouchstart;
  let originalMaxTouchPoints;
  let originalMsMaxTouchPoints;
  let originalDocumentTouch;

  beforeEach(() => {
    originalOntouchstart = window.ontouchstart;
    originalMaxTouchPoints = Object.getOwnPropertyDescriptor(navigator, 'maxTouchPoints');
    originalMsMaxTouchPoints = Object.getOwnPropertyDescriptor(navigator, 'msMaxTouchPoints');
    originalDocumentTouch = window.DocumentTouch;

    delete window.ontouchstart;
    delete window.DocumentTouch;
  });

  afterEach(() => {
    if (originalOntouchstart !== undefined) {
      window.ontouchstart = originalOntouchstart;
    } else {
      delete window.ontouchstart;
    }

    if (originalMaxTouchPoints) {
      Object.defineProperty(navigator, 'maxTouchPoints', originalMaxTouchPoints);
    } else {
      delete navigator.maxTouchPoints;
    }

    if (originalMsMaxTouchPoints) {
      Object.defineProperty(navigator, 'msMaxTouchPoints', originalMsMaxTouchPoints);
    } else {
      delete navigator.msMaxTouchPoints;
    }

    if (originalDocumentTouch) {
      window.DocumentTouch = originalDocumentTouch;
    }
  });

  describe('hasTouchCapability', () => {
    it('returns true when ontouchstart is available', () => {
      window.ontouchstart = null;

      expect(hasTouchCapability()).toBe(true);
    });

    it('returns true when maxTouchPoints is greater than 0', () => {
      Object.defineProperty(navigator, 'maxTouchPoints', {
        value: 1,
        configurable: true,
      });

      expect(hasTouchCapability()).toBe(true);
    });

    it('returns true when msMaxTouchPoints is greater than 0', () => {
      Object.defineProperty(navigator, 'maxTouchPoints', {
        value: 0,
        configurable: true,
      });
      Object.defineProperty(navigator, 'msMaxTouchPoints', {
        value: 1,
        configurable: true,
      });

      expect(hasTouchCapability()).toBe(true);
    });

    it('returns false when no touch capability detected', () => {
      Object.defineProperty(navigator, 'maxTouchPoints', {
        value: 0,
        configurable: true,
      });
      Object.defineProperty(navigator, 'msMaxTouchPoints', {
        value: 0,
        configurable: true,
      });

      expect(hasTouchCapability()).toBe(false);
    });

    it('returns true when DocumentTouch is available and document is instance of DocumentTouch', () => {
      Object.defineProperty(navigator, 'maxTouchPoints', {
        value: 0,
        configurable: true,
      });
      Object.defineProperty(navigator, 'msMaxTouchPoints', {
        value: 0,
        configurable: true,
      });

      window.DocumentTouch = function DocumentTouch() {};
      Object.setPrototypeOf(document, window.DocumentTouch.prototype);

      expect(hasTouchCapability()).toBe(true);
    });
  });
});
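The component specs that follow stub this utility through its module namespace rather than exercising real browser APIs. A minimal sketch of that pattern (assumption: a Jest setup where namespace imports can be spied on, as in the specs below):

```javascript
import * as touchDetection from '~/lib/utils/touch_detection';

// Force the touch-device branch for a single test; jest.restoreAllMocks()
// in an afterEach hook puts the real implementation back.
jest.spyOn(touchDetection, 'hasTouchCapability').mockReturnValue(true);
```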
@@ -1,5 +1,6 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { createMockDirective } from 'helpers/vue_mock_directive';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import * as touchDetection from '~/lib/utils/touch_detection';
import BrandLogo from 'jh_else_ce/super_sidebar/components/brand_logo.vue';

describe('Brand Logo component', () => {
@@ -11,6 +12,7 @@ describe('Brand Logo component', () => {

  const findBrandLogo = () => wrapper.findByTestId('brand-header-custom-logo');
  const findDefaultLogo = () => wrapper.findByTestId('brand-header-default-logo');
+  const getTooltip = () => getBinding(wrapper.element, 'gl-tooltip').value;

  const createWrapper = (props = {}) => {
    wrapper = shallowMountExtended(BrandLogo, {
@@ -27,16 +29,50 @@ describe('Brand Logo component', () => {
    });
  };

  it('renders it', () => {
    createWrapper();
    expect(findBrandLogo().exists()).toBe(true);
    expect(findBrandLogo().element.src).toBe(defaultPropsData.logoUrl);
  describe('basic functionality', () => {
    it('renders it', () => {
      createWrapper();
      expect(findBrandLogo().exists()).toBe(true);
      expect(findBrandLogo().element.src).toBe(defaultPropsData.logoUrl);
    });

    it('when logoUrl given empty', () => {
      createWrapper({ logoUrl: '' });

      expect(findBrandLogo().exists()).toBe(false);
      expect(findDefaultLogo().exists()).toBe(true);
    });
  });

  it('when logoUrl given empty', () => {
    createWrapper({ logoUrl: '' });
  describe('tooltip behavior', () => {
    beforeEach(() => {
      jest.spyOn(touchDetection, 'hasTouchCapability');
    });

    expect(findBrandLogo().exists()).toBe(false);
    expect(findDefaultLogo().exists()).toBe(true);
    afterEach(() => {
      jest.restoreAllMocks();
    });

    it('shows homepage tooltip on non-touch devices', () => {
      touchDetection.hasTouchCapability.mockReturnValue(false);
      createWrapper();

      expect(getTooltip()).toBe('Homepage');
    });

    it('hides homepage tooltip on touch devices', () => {
      touchDetection.hasTouchCapability.mockReturnValue(true);
      createWrapper();

      expect(getTooltip()).toBeNull();
    });

    it('calls hasTouchCapability when computing tooltip', () => {
      touchDetection.hasTouchCapability.mockReturnValue(false);
      createWrapper();

      expect(getTooltip()).toBe('Homepage');
      expect(touchDetection.hasTouchCapability).toHaveBeenCalled();
    });
  });
});
@@ -3,6 +3,7 @@ import { GlButton } from '@gitlab/ui';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import * as touchDetection from '~/lib/utils/touch_detection';
import { JS_TOGGLE_COLLAPSE_CLASS, JS_TOGGLE_EXPAND_CLASS } from '~/super_sidebar/constants';
import SuperSidebarToggle from '~/super_sidebar/components/super_sidebar_toggle.vue';
import { toggleSuperSidebarCollapsed } from '~/super_sidebar/super_sidebar_collapsed_state_manager';
@@ -51,15 +52,56 @@ describe('SuperSidebarToggle component', () => {
    });
  });

  describe('tooltip', () => {
    it('displays "Hide sidebar" when type is collapse', () => {
      createWrapper({ type: 'collapse' });
      expect(getTooltip().title).toBe('Hide sidebar');
  describe('tooltip behavior', () => {
    beforeEach(() => {
      jest.spyOn(touchDetection, 'hasTouchCapability');
    });

    it('displays "Keep sidebar visible" when type is expand', () => {
      createWrapper();
      expect(getTooltip().title).toBe('Keep sidebar visible');
    afterEach(() => {
      jest.restoreAllMocks();
    });

    describe('on non-touch devices', () => {
      beforeEach(() => {
        touchDetection.hasTouchCapability.mockReturnValue(false);
      });

      it('displays "Hide sidebar" when type is collapse', () => {
        createWrapper({ type: 'collapse' });
        expect(getTooltip().title).toBe('Hide sidebar');
        expect(getTooltip().placement).toBe('bottom');
        expect(getTooltip().container).toBe('super-sidebar');
      });

      it('displays "Keep sidebar visible" when type is expand', () => {
        createWrapper();
        expect(getTooltip().title).toBe('Keep sidebar visible');
        expect(getTooltip().placement).toBe('right');
      });
    });

    describe('on touch devices', () => {
      beforeEach(() => {
        touchDetection.hasTouchCapability.mockReturnValue(true);
      });

      it('disables tooltip when type is expand', () => {
        createWrapper({ type: 'expand' });

        expect(getTooltip()).toBeNull();
      });

      it('disables tooltip when type is collapse', () => {
        createWrapper({ type: 'collapse' });

        expect(getTooltip()).toBeNull();
      });

      it('calls hasTouchCapability when computing tooltip', () => {
        createWrapper();

        expect(touchDetection.hasTouchCapability).toHaveBeenCalled();
      });
    });
  });
@@ -0,0 +1,202 @@ (new file)
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillResourceLabelEventsNamespaceIdAndCleanInvalid, :migration_with_transaction, feature_category: :team_planning do
  let(:resource_label_events) { table(:resource_label_events) }
  let(:organization) { table(:organizations).create!(name: 'organization', path: 'organization') }
  let(:project_namespace) do
    table(:namespaces).create!(name: "project1", path: "project1", organization_id: organization.id)
  end

  let(:label_namespace) do
    table(:namespaces).create!(name: "labelGroup", path: "labelg", organization_id: organization.id)
  end

  let(:label) { table(:labels).create!(title: 'label1', color: "#990000", group_id: label_namespace.id) }

  let(:issue_namespace) do
    table(:namespaces).create!(name: "group1", path: "group1", organization_id: organization.id)
  end

  let(:epic_namespace) do
    table(:namespaces).create!(name: "group2", path: "group2", organization_id: organization.id)
  end

  let(:fake_namespace) do
    # Can't create resource_label_events without a namespace in specs due to the invalid FK
    table(:namespaces).create!(id: 0, name: "fake", path: "fake", organization_id: organization.id)
  end

  let(:project) do
    table(:projects).create!(
      namespace_id: project_namespace.id,
      project_namespace_id: project_namespace.id,
      organization_id: organization.id
    )
  end

  let(:merge_request) do
    table(:merge_requests).create!(target_project_id: project.id, target_branch: 'main', source_branch: 'not-main')
  end

  let(:issue_work_item_type_id) { 1 }
  let(:issue) do
    table(:issues).create!(
      title: 'First issue',
      iid: 1,
      namespace_id: issue_namespace.id,
      work_item_type_id: issue_work_item_type_id
    )
  end

  let(:user) do
    table(:users).create!(
      username: 'john_doe',
      email: 'johndoe@gitlab.com',
      projects_limit: 2,
      organization_id: organization.id
    )
  end

  let(:epic) do
    table(:epics).create!(
      iid: 1,
      group_id: epic_namespace.id,
      author_id: user.id,
      title: 't',
      title_html: 't',
      issue_id: issue.id
    )
  end

  let!(:issue_label_event1) do
    resource_label_events.create!(
      issue_id: issue.id,
      label_id: label.id,
      action: 1,
      user_id: user.id,
      namespace_id: fake_namespace.id
    )
  end

  let!(:issue_label_event2) do
    resource_label_events.create!(
      issue_id: issue.id,
      label_id: label.id,
      action: 1,
      user_id: user.id,
      namespace_id: fake_namespace.id
    )
  end

  let!(:mr_label_event1) do
    resource_label_events.create!(
      merge_request_id: merge_request.id,
      label_id: label.id,
      action: 1,
      user_id: user.id,
      namespace_id: fake_namespace.id
    )
  end

  let!(:mr_label_event2) do
    resource_label_events.create!(
      merge_request_id: merge_request.id,
      label_id: label.id,
      action: 1,
      user_id: user.id,
      namespace_id: fake_namespace.id
    )
  end

  let!(:epic_label_event1) do
    resource_label_events.create!(
      epic_id: epic.id,
      label_id: label.id,
      action: 1,
      user_id: user.id,
      namespace_id: fake_namespace.id
    )
  end

  let!(:epic_label_event2) do
    resource_label_events.create!(
      epic_id: epic.id,
      label_id: label.id,
      action: 1,
      user_id: user.id,
      namespace_id: fake_namespace.id
    )
  end

  let!(:invalid_label_event) do
    # Necessary as we can no longer create invalid test data due to the constraint, but we know it exists in production
    resource_label_events.connection.execute(<<~SQL)
      ALTER TABLE resource_label_events DROP CONSTRAINT check_614704e750;
    SQL

    event = resource_label_events.create!(
      epic_id: epic.id,
      issue_id: issue.id,
      label_id: label.id,
      action: 1,
      user_id: user.id,
      namespace_id: fake_namespace.id
    )

    resource_label_events.connection.execute(<<~SQL)
      ALTER TABLE resource_label_events
        ADD CONSTRAINT check_614704e750 CHECK ((num_nonnulls(epic_id, issue_id, merge_request_id) = 1)) NOT VALID;
    SQL

    event
  end

  let(:migration) do
    start_id, end_id = resource_label_events.pick('MIN(id), MAX(id)')

    described_class.new(
      start_id: start_id,
      end_id: end_id,
      batch_table: :resource_label_events,
      batch_column: :id,
      sub_batch_size: 2,
      pause_ms: 0,
      job_arguments: [],
      connection: ApplicationRecord.connection
    )
  end

  subject(:migrate) { migration.perform }

  describe '#up' do
    it 'updates records in batches' do
      expect do
        migrate
      end.to make_queries_matching(/UPDATE\s+"resource_label_events"/, 16) # 4 updates per batch
    end

    it 'sets correct namespace_id in every record' do
      expect { migrate }.to change { issue_label_event1.reload.namespace_id }.from(0).to(issue_namespace.id).and(
        change { issue_label_event2.reload.namespace_id }.from(0).to(issue_namespace.id)
      ).and(
        change { mr_label_event1.reload.namespace_id }.from(0).to(project_namespace.id)
      ).and(
        change { mr_label_event2.reload.namespace_id }.from(0).to(project_namespace.id)
      ).and(
        change { epic_label_event1.reload.namespace_id }.from(0).to(epic_namespace.id)
      ).and(
        change { epic_label_event2.reload.namespace_id }.from(0).to(epic_namespace.id)
      ).and(
        change { invalid_label_event.reload.namespace_id }.from(0).to(epic_namespace.id)
      ).and(
        change { invalid_label_event.reload.attributes.slice('issue_id', 'epic_id') }.from(
          { 'issue_id' => issue.id, 'epic_id' => epic.id }
        ).to(
          { 'issue_id' => nil, 'epic_id' => epic.id }
        )
      )
    end
  end
end
@@ -815,6 +815,13 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
    end

    shared_examples "secret CI variables" do
+      before do
+        # Clear memoization to ensure test isolation and prevent cached values
+        # from affecting test results across different scenarios
+        pipeline.clear_memoization(:protected_ref)
+        pipeline.clear_memoization(:git_ref)
+      end
+
      let(:protected_variable_item) do
        Gitlab::Ci::Variables::Collection::Item.fabricate(protected_variable)
      end
@@ -861,17 +868,21 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
      let_it_be(:pipeline) { merge_request.pipelines_for_merge_request.first }
      let_it_be(:job) { create(:ci_build, ref: merge_request.source_branch, tag: false, pipeline: pipeline) }

-      context 'when ref is protected' do
+      context 'when the pipeline is protected' do
        before do
-          create(:protected_branch, :developers_can_merge, name: merge_request.source_branch, project: project)
+          allow(pipeline).to receive(:protected_ref?).and_return(true)
        end

-        it 'does not return protected variables as it is not supported for merge request pipelines' do
-          is_expected.to contain_exactly(unprotected_variable_item)
+        it 'returns protected variables' do
+          is_expected.to contain_exactly(protected_variable_item, unprotected_variable_item)
        end
      end

-      context 'when ref is not protected' do
+      context 'when the pipeline is not protected' do
+        before do
+          allow(pipeline).to receive(:protected_ref?).and_return(false)
+        end
+
        it { is_expected.to contain_exactly(unprotected_variable_item) }
      end
    end
@@ -902,10 +913,9 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur

      context 'with protected environments' do
        it 'memoizes the result by environment' do
-          expect(pipeline.project)
-            .to receive(:protected_for?)
-            .with(pipeline.jobs_git_ref)
-            .once.and_return(true)
+          expect(pipeline)
+            .to receive(:protected_ref?)
+            .once.and_return(true)

          expect_next_instance_of(described_class::Group) do |group_variables_builder|
            expect(group_variables_builder)
@@ -924,10 +934,9 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur

      context 'with unprotected environments' do
        it 'memoizes the result by environment' do
-          expect(pipeline.project)
-            .to receive(:protected_for?)
-            .with(pipeline.jobs_git_ref)
-            .once.and_return(false)
+          expect(pipeline)
+            .to receive(:protected_ref?)
+            .once.and_return(false)

          expect_next_instance_of(described_class::Group) do |group_variables_builder|
            expect(group_variables_builder)
@@ -972,9 +981,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur

      context 'with protected environments' do
        it 'memoizes the result by environment' do
-          expect(pipeline.project)
-            .to receive(:protected_for?)
-            .with(pipeline.jobs_git_ref)
+          expect(pipeline)
+            .to receive(:protected_ref?)
            .once.and_return(true)

          expect_next_instance_of(described_class::Project) do |project_variables_builder|
@@ -994,9 +1002,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur

      context 'with unprotected environments' do
        it 'memoizes the result by environment' do
-          expect(pipeline.project)
-            .to receive(:protected_for?)
-            .with(pipeline.jobs_git_ref)
+          expect(pipeline)
+            .to receive(:protected_ref?)
            .once.and_return(false)

          expect_next_instance_of(described_class::Project) do |project_variables_builder|
@@ -0,0 +1,27 @@ (new file)
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillResourceLabelEventsNamespaceIdAndCleanInvalid, migration: :gitlab_main, feature_category: :team_planning do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          gitlab_schema: :gitlab_main,
          table_name: :resource_label_events,
          column_name: :id,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          max_batch_size: described_class::MAX_BATCH_SIZE
        )
      }
    end
  end
end
@@ -1,255 +0,0 @@ (file deleted)
# frozen_string_literal: true

require 'fast_spec_helper'
require 'gitlab/dangerfiles/spec_helper'
require_relative '../../../tooling/danger/finalize_batched_background_migration_validator_helper'
require_relative '../../../tooling/danger/project_helper'

RSpec.describe Tooling::Danger::FinalizeBatchedBackgroundMigrationValidatorHelper, feature_category: :database do
  include_context "with dangerfile"
  let(:fake_project_helper) { instance_double(Tooling::Danger::ProjectHelper) }
  let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
  let(:valid_file_path) { 'db/post_migrate/20230101000000_finalize_migration.rb' }
  let(:invalid_file_path) { 'app/models/user.rb' }
  let(:helper) { fake_danger.new(helper: fake_project_helper) }

  describe '#validate_migrations' do
    context 'when no post migrate files are present' do
      it 'returns early' do
        expect(helper).not_to receive(:display_errors)

        helper.validate_migrations([invalid_file_path])
      end
    end

    context 'when post migrate files are present' do
      before do
        allow(File).to receive(:read).and_call_original
        allow(File).to receive(:read).with(valid_file_path).and_return(file_content)
      end

      context 'with no ensure_batched_background_migration_is_finished call' do
        let(:file_content) do
          <<~RUBY
            class QueueMigration < Gitlab::Database::Migration[2.2]
              def up
                queue_batched_background_migration(
                )
              end
            end
          RUBY
        end

        it 'does not report any errors' do
          expect(helper).to receive(:display_errors).with([])

          helper.validate_migrations([valid_file_path])
        end
      end

      context 'with valid content' do
        let(:file_content) do
          <<~RUBY
            class FinalizeMigration < Gitlab::Database::Migration[2.2]
              def up
                ensure_batched_background_migration_is_finished(
                  job_class_name: 'MigrationClass',
                  table_name: :users,
                  column_name: :id,
                  job_arguments: []
                )
              end
            end
          RUBY
        end

        it 'does not report any errors' do
          expect(helper).to receive(:display_errors).with([])

          helper.validate_migrations([valid_file_path])
        end
      end

      context 'with a constant instead of string for job_class_name' do
        let(:file_content) do
          <<~RUBY
            class FinalizeMigration < Gitlab::Database::Migration[2.2]
              def up
                ensure_batched_background_migration_is_finished(
                  job_class_name: MyMigrationClass,
                  table_name: :users,
                  column_name: :id,
                  job_arguments: []
                )
              end
            end
          RUBY
        end

        it 'reports an error about job_class_name' do
          expect(helper).to receive(:display_errors) do |errors|
            expect(errors.length).to eq(1)
            expect(errors.first[:message]).to include('The value of job_class_name should be a string in PascalCase')
          end

          helper.validate_migrations([valid_file_path])
        end
      end

      context 'with multiple ensure_batched_background_migration_is_finished calls' do
        let(:file_content) do
          <<~RUBY
            class FinalizeMigration < Gitlab::Database::Migration[2.2]

              def up
                first_finalize_migration
                second_finalize_migration
              end

              def first_finalize_migration
                ensure_batched_background_migration_is_finished(
                  job_class_name: 'MigrationClass1',
                  table_name: :users,
                  column_name: :id,
                  job_arguments: []
                )
              end
              def second_finalize_migration
                ensure_batched_background_migration_is_finished(
                  job_class_name: 'MigrationClass2',
                  table_name: :projects,
                  column_name: :id,
                  job_arguments: []
                )
              end
            end
          RUBY
        end

        it 'reports an error about multiple migrations' do
          expect(helper).to receive(:display_errors) do |errors|
            expect(errors.length).to eq(1)
            expect(errors.first[:message]).to include('There should only be one finalize batched background migration')
          end

          helper.validate_migrations([valid_file_path])
        end
      end

      context 'with multiple errors' do
        let(:file_content) do
          <<~RUBY
            class FinalizeMigration < Gitlab::Database::Migration[2.2]

              def up
                first_finalize_migration
                second_finalize_migration
              end

              def first_finalize_migration
                ensure_batched_background_migration_is_finished(
                  job_class_name: MigrationClass1,
                  table_name: :users,
                  column_name: :id,
                  job_arguments: []
                )
              end
              def second_finalize_migration
                ensure_batched_background_migration_is_finished(
                  job_class_name: MigrationClass2,
                  table_name: :projects,
                  column_name: :id,
                  job_arguments: []
                )
              end
            end
          RUBY
        end

        it 'reports both errors' do
          expect(helper).to receive(:display_errors) do |errors|
            expect(errors.length).to eq(2)
            expect(errors.pluck(:message)).to include(
              a_string_matching('There should only be one finalize batched background migration'),
              a_string_matching('The value of job_class_name should be a string in PascalCase')
            )
          end

          helper.validate_migrations([valid_file_path])
        end
      end
    end
  end

  describe '#validate_job_class_name_format' do
    it 'returns nil when job_class_name is a string' do
      file_content = <<~RUBY
        ensure_batched_background_migration_is_finished(
          job_class_name: "MigrationClass",
          table_name: :users
        )
      RUBY

      expect(helper.validate_job_class_name_format(valid_file_path, file_content)).to be_nil
    end

    it 'returns an error when job_class_name value is a constant' do
      file_content = <<~RUBY
        ensure_batched_background_migration_is_finished(
          job_class_name: MigrationClass,
          table_name: :users
        )
      RUBY

      result = helper.validate_job_class_name_format(valid_file_path, file_content)
      expect(result).to be_a(Hash)
      expect(result[:message]).to include('The value of job_class_name should be a string in PascalCase')
    end
  end

  describe '#validate_migration_count' do
    it 'returns nil when exactly one migration is present' do
      file_content = <<~RUBY
        ensure_batched_background_migration_is_finished(
          job_class_name: 'MigrationClass',
          table_name: :users
        )
      RUBY

      expect(helper.validate_migration_count(valid_file_path, file_content)).to be_nil
    end

    it 'returns an error when multiple migrations are present' do
      file_content = <<~RUBY
        ensure_batched_background_migration_is_finished(job_class_name: 'First')
        ensure_batched_background_migration_is_finished(job_class_name: 'Second')
      RUBY

      result = helper.validate_migration_count(valid_file_path, file_content)
      expect(result).to be_a(Hash)
      expect(result[:message]).to include('There should only be one finalize batched background migration')
    end
  end

  describe '#display_errors' do
    it 'returns nil when no errors are present' do
      expect(helper.display_errors([])).to be_nil
    end

    it 'calls fail with formatted error messages when errors are present' do
      errors = [
        { file: 'file1.rb', message: 'Error 1' },
        { file: 'file2.rb', message: 'Error 2' }
      ]

      expected_messages = [
        '**file1.rb**: Error 1',
        '**file2.rb**: Error 2'
      ]

      expect(helper).to receive(:fail).with(expected_messages)

      helper.display_errors(errors)
    end
  end
end
@@ -1,61 +0,0 @@ (file deleted)
# frozen_string_literal: true

module Tooling
  module Danger
    module FinalizeBatchedBackgroundMigrationValidatorHelper
      POST_MIGRATE_PATH = [
        'db/post_migrate',
        'ee/db/geo/post_migrate',
        'ee/db/embedding/post_migrate'
      ].freeze

      def validate_migrations(changed_files)
        post_migrate_migrations = changed_files.select { |f| f.match?(%r{^(?:#{POST_MIGRATE_PATH.join('|')})/.+$}) }

        return if post_migrate_migrations.empty?

        errors = []

        post_migrate_migrations.each do |file_path|
          file_content = File.read(file_path)

          # Check only finalize batched background migration files
          next unless file_content.include?('ensure_batched_background_migration_is_finished')

          errors << validate_job_class_name_format(file_path, file_content)

          errors << validate_migration_count(file_path, file_content)
        end
        display_errors(errors.compact)
      end

      def validate_job_class_name_format(file_path, file_content)
        return unless file_content.include?('job_class_name')

        return if file_content.match?(/job_class_name:[ \t]*["'][A-Za-z0-9]+["']/)

        { file: file_path,
          message: "The value of job_class_name should be a string in PascalCase e.g 'FinalizeMigrationClass' " }
      end

      def validate_migration_count(file_path, file_content)
        occurrences = file_content.scan(/ensure_batched_background_migration_is_finished/).count
        return unless occurrences != 1

        { file: file_path, message: "There should only be one finalize batched background migration per class" }
      end

      def display_errors(errors)
        return if errors.empty?

        failure_messages = errors.map do |error|
          "**#{error[:file]}**: #{error[:message]}"
        end

        return unless failure_messages.any?

        fail(failure_messages)
      end
    end
  end
end