Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-09-12 15:09:50 +00:00
parent f9b86eaa38
commit fe428c5f09
52 changed files with 1169 additions and 46 deletions

View File

@ -67,7 +67,7 @@ eslint:
extends:
- .static-analysis-base
- .yarn-cache
- .frontend:rules:default-frontend-jobs-with-docs-changes
- .frontend:rules:default-frontend-jobs
needs: ['generate-apollo-graphql-schema']
variables:
USE_BUNDLE_INSTALL: "false"
@ -75,6 +75,17 @@ eslint:
- yarn_install_script
- run_timed_command "yarn run lint:eslint:all"
eslint-docs:
extends:
- .static-analysis-base
- .yarn-cache
- .frontend:rules:default-frontend-jobs-with-docs-changes
variables:
USE_BUNDLE_INSTALL: "false"
script:
- yarn_install_script
- run_timed_command "scripts/frontend/lint_docs_links.mjs"
haml-lint:
extends:
- .static-analysis-base

View File

@ -712,7 +712,7 @@ gem 'cvss-suite', '~> 3.0.1', require: 'cvss_suite' # rubocop:todo Gemfile/Missi
gem 'arr-pm', '~> 0.0.12' # rubocop:todo Gemfile/MissingFeatureCategory
# Remote Development
gem 'devfile', '~> 0.0.26.pre.alpha1', feature_category: :workspaces
gem 'devfile', '~> 0.0.28.pre.alpha1', feature_category: :workspaces
# Apple plist parsing
gem 'CFPropertyList', '~> 3.0.0' # rubocop:todo Gemfile/MissingFeatureCategory

View File

@ -114,9 +114,10 @@
{"name":"deprecation_toolkit","version":"1.5.1","platform":"ruby","checksum":"a8a1ab1a19ae40ea12560b65010e099f3459ebde390b76621ef0c21c516a04ba"},
{"name":"derailed_benchmarks","version":"2.1.2","platform":"ruby","checksum":"eaadc6206ceeb5538ff8f5e04a0023d54ebdd95d04f33e8960fb95a5f189a14f"},
{"name":"descendants_tracker","version":"0.0.4","platform":"ruby","checksum":"e9c41dd4cfbb85829a9301ea7e7c48c2a03b26f09319db230e6479ccdc780897"},
{"name":"devfile","version":"0.0.26.pre.alpha1","platform":"arm64-darwin","checksum":"9865844fb87a616d3fa8f6bdc4b2c3fc7c8d67b7126b39afc7049ffa3dd419ba"},
{"name":"devfile","version":"0.0.26.pre.alpha1","platform":"ruby","checksum":"2012962fa924e51b5444f22c68055a6f02c72cb129c5180876468a0361dec9e4"},
{"name":"devfile","version":"0.0.26.pre.alpha1","platform":"x86_64-linux","checksum":"050e1a996fbae10ad6b9db00201b683bf9a11987d2869672172e7342003636ef"},
{"name":"devfile","version":"0.0.28.pre.alpha1","platform":"aarch64-linux","checksum":"fc8a628b8f7ad3665122e0e74cadd33aa405f2cdf15c1a2d199ce40f5b4db84c"},
{"name":"devfile","version":"0.0.28.pre.alpha1","platform":"arm64-darwin","checksum":"547fcd5e55fe95f592d1d1180ffe32695bfa95ec426506807e74bc17da12cc45"},
{"name":"devfile","version":"0.0.28.pre.alpha1","platform":"ruby","checksum":"59c27089f931d072500f53c9f3a24bcf5a554ddba4040d9bcd509aa8ea7acf7b"},
{"name":"devfile","version":"0.0.28.pre.alpha1","platform":"x86_64-linux","checksum":"6856b744d7a3ba7f7fdafcc151d7c4b22789a65ea7be8ce9582350bbd7a7a48e"},
{"name":"device_detector","version":"1.0.0","platform":"ruby","checksum":"b800fb3150b00c23e87b6768011808ac1771fffaae74c3238ebaf2b782947a7d"},
{"name":"devise","version":"4.9.3","platform":"ruby","checksum":"480638d6c51b97f56da6e28d4f3e2a1b8e606681b316aa594b87c6ab94923488"},
{"name":"devise-two-factor","version":"4.1.1","platform":"ruby","checksum":"c95f5b07533e62217aaed3c386874d94e2d472fb5f2b6598afe8600fc17a8b95"},

View File

@ -496,7 +496,7 @@ GEM
thor (>= 0.19, < 2)
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
devfile (0.0.26.pre.alpha1)
devfile (0.0.28.pre.alpha1)
device_detector (1.0.0)
devise (4.9.3)
bcrypt (~> 3.0)
@ -2008,7 +2008,7 @@ DEPENDENCIES
declarative_policy (~> 1.1.0)
deprecation_toolkit (~> 1.5.1)
derailed_benchmarks
devfile (~> 0.0.26.pre.alpha1)
devfile (~> 0.0.28.pre.alpha1)
device_detector
devise (~> 4.9.3)
devise-pbkdf2-encryptable (~> 0.0.0)!

View File

@ -11,6 +11,37 @@ import {
applyGitLabUIConfig({
translations: {
'CloseButton.title': __('Close'),
'DuoChatContextItemPopover.DisabledReason': __('This item is disabled'),
'GlAlert.closeButtonTitle': __('Dismiss'),
'GlBanner.closeButtonTitle': __('Dismiss'),
'GlBreadcrumb.showMoreLabel': __('Show more breadcrumbs'),
'GlBroadcastMessage.closeButtonTitle': __('Dismiss'),
'GlDuoChatContextItemMenu.emptyStateMessage': s__('DuoChat|No results found'),
'GlDuoChatContextItemMenu.loadingMessage': __('Loading...'),
'GlDuoChatContextItemMenu.searchInputPlaceholder': s__('DuoChat|Search %{categoryLabel}...'),
'GlDuoChatContextItemMenu.selectedContextItemsTitle': s__('DuoChat|Included references'),
'GlDuoWorkflowPanel.collapseButtonTitle': s__('GitLabDuo|Collapse'),
'GlDuoWorkflowPanel.expandButtonTitle': s__('GitLabDuo|Expand'),
'GlDuoWorkflowPrompt.cancelButtonText': s__('GitLabDuo|Cancel'),
'GlDuoWorkflowPrompt.confirmButtonText': s__('GitLabDuo|Generate plan'),
'GlDuoWorkflowPrompt.imageDescription': s__(
'GitLabDuo|It should have any tools necessary for the workflow installed.',
),
'GlDuoWorkflowPrompt.imageLabel': s__('GitLabDuo|Image'),
'GlDuoWorkflowPrompt.imageLabelDescription': s__(
'GitLabDuo|The container image to run the workflow in.',
),
'GlDuoWorkflowPrompt.promptDescription': s__(
'GitLabDuo|Be specific and include any requirements.',
),
'GlDuoWorkflowPrompt.promptLabel': __('Description'),
'GlDuoWorkflowPrompt.promptLabelDescription': s__(
'GitLabDuo|What would you like to do and how.',
),
'GlDuoWorkflowPrompt.title': s__('GitLabDuo|Goal'),
'GlModal.closeButtonTitle': __('Close'),
'GlToken.closeButtonTitle': __('Remove'),
'GlSearchBoxByType.input.placeholder': __('Search'),
'GlSearchBoxByType.clearButtonTitle': __('Clear'),
'GlSorting.sortAscending': __('Sort direction: Ascending'),
@ -19,7 +50,6 @@ applyGitLabUIConfig({
'GlKeysetPagination.prevText': PREV,
'GlKeysetPagination.navigationLabel': s__('Pagination|Pagination'),
'GlKeysetPagination.nextText': NEXT,
'GlPagination.labelFirstPage': LABEL_FIRST_PAGE,
'GlPagination.labelLastPage': LABEL_LAST_PAGE,
'GlPagination.labelNextPage': LABEL_NEXT_PAGE,
@ -27,7 +57,6 @@ applyGitLabUIConfig({
'GlPagination.labelPrevPage': LABEL_PREV_PAGE,
'GlPagination.nextText': NEXT,
'GlPagination.prevText': PREV,
'GlCollapsibleListbox.srOnlyResultsLabel': (count) => n__('%d result', '%d results', count),
},
});

View File

@ -379,7 +379,7 @@ export default {
<page-heading :heading="$options.i18n.pageTitle">
<template #actions>
<gl-button
v-if="glFeatures.editBranchRules && branchRule"
v-if="glFeatures.editBranchRules && branchRule && canAdminProtectedBranches"
v-gl-modal="$options.deleteModalId"
data-testid="delete-rule-button"
category="secondary"
@ -396,7 +396,7 @@ export default {
<crud-component :title="$options.i18n.ruleTarget" data-testid="rule-target-card">
<template #actions>
<gl-button
v-if="glFeatures.editBranchRules && !isPredefinedRule"
v-if="glFeatures.editBranchRules && !isPredefinedRule && canAdminProtectedBranches"
v-gl-modal="$options.editModalId"
data-testid="edit-rule-name-button"
size="small"

View File

@ -253,6 +253,7 @@ export default {
iid: this.issuableIid,
webUrl: this.issuable.webUrl,
fullPath: this.workItemFullPath,
workItemType: this.type.toLowerCase(),
});
},
navigateToIssuable() {

View File

@ -53,7 +53,7 @@ export default {
return this.activeItem.referencePath.split(delimiter)[0];
},
modalIsGroup() {
return this.issuableType === TYPE_EPIC;
return this.issuableType.toLowerCase() === TYPE_EPIC;
},
headerReference() {
const path = this.activeItemFullPath.substring(this.activeItemFullPath.lastIndexOf('/') + 1);

View File

@ -1,8 +1,8 @@
<script>
import { produce } from 'immer';
import { GlAlert, GlButton, GlLink } from '@gitlab/ui';
import { GlAlert, GlButton, GlLink, GlBadge } from '@gitlab/ui';
import { s__ } from '~/locale';
import { s__, n__, sprintf } from '~/locale';
import { helpPagePath } from '~/helpers/help_page_helper';
import CrudComponent from '~/vue_shared/components/crud_component.vue';
@ -17,6 +17,7 @@ import {
LINKED_CATEGORIES_MAP,
LINKED_ITEMS_ANCHOR,
WORKITEM_RELATIONSHIPS_SHOWLABELS_LOCALSTORAGEKEY,
sprintfWorkItem,
} from '../../constants';
import WorkItemMoreActions from '../shared/work_item_more_actions.vue';
@ -29,6 +30,7 @@ export default {
GlAlert,
GlButton,
GlLink,
GlBadge,
CrudComponent,
WorkItemRelationshipList,
WorkItemAddRelationshipForm,
@ -129,6 +131,17 @@ export default {
isEmptyRelatedWorkItems() {
return !this.error && this.linkedWorkItems.length === 0;
},
countBadgeAriaLabel() {
const message = sprintf(
n__(
'WorkItem|%{workItemType} has 1 linked item',
'WorkItem|%{workItemType} has %{itemCount} linked items',
this.linkedWorkItemsCount,
),
{ itemCount: this.linkedWorkItemsCount },
);
return sprintfWorkItem(message, this.workItemType);
},
},
mounted() {
this.showLabels = getShowLabelsFromLocalStorage(
@ -224,12 +237,20 @@ export default {
ref="widget"
:anchor-id="widgetName"
:title="$options.i18n.title"
:count="linkedWorkItemsCount"
icon="link"
:is-loading="isLoading"
is-collapsible
data-testid="work-item-relationships"
>
<template #count>
<gl-badge
:aria-label="countBadgeAriaLabel"
data-testid="linked-items-count-bage"
variant="muted"
>
{{ linkedWorkItemsCount }}
</gl-badge>
</template>
<template #actions>
<gl-button
v-if="canAdminWorkItemLink"

View File

@ -386,6 +386,9 @@ export default {
page_before: this.pageParams.beforeCursor ?? undefined,
};
},
activeWorkItemType() {
return this.workItemType || this.activeItem?.workItemType;
},
},
watch: {
eeWorkItemUpdateCount() {
@ -559,6 +562,7 @@ export default {
v-if="workItemDrawerEnabled"
:active-item="activeItem"
:open="isItemSelected"
:issuable-type="activeWorkItemType"
@close="activeItem = null"
@addChild="refetchItems"
@workItemDeleted="deleteItem"

View File

@ -14,6 +14,7 @@ module Groups
push_force_frontend_feature_flag(:create_group_level_work_items,
group&.create_group_level_work_items_feature_flag_enabled?)
push_force_frontend_feature_flag(:glql_integration, group&.glql_integration_feature_flag_enabled?)
push_frontend_feature_flag(:issues_list_drawer, group)
end
before_action :handle_new_work_item_path, only: [:show]

View File

@ -5,5 +5,5 @@ introduced_by_mr: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165303
feature_category: self-hosted_models
milestone: '17.4'
saved_to_database: true
scope: [Instance, User]
scope: [Instance]
streamed: true

View File

@ -5,5 +5,5 @@ introduced_by_mr: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165321
feature_category: self-hosted_models
milestone: '17.4'
saved_to_database: true
scope: [Instance, User]
scope: [Instance]
streamed: true

View File

@ -5,5 +5,5 @@ introduced_by_mr: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165489
feature_category: self-hosted_models
milestone: '17.4'
saved_to_database: true
scope: [Project]
scope: [Instance]
streamed: true

View File

@ -5,5 +5,5 @@ introduced_by_mr: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165480
feature_category: self-hosted_models
milestone: '17.4'
saved_to_database: true
scope: [Instance, User]
scope: [Instance]
streamed: true

View File

@ -5,5 +5,5 @@ introduced_by_mr: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165520
feature_category: self-hosted_models
milestone: '17.4'
saved_to_database: true
scope: [Instance, User]
scope: [Instance]
streamed: true

View File

@ -751,8 +751,12 @@
- 1
- - search_zoekt_orphaned_index_event
- 1
- - search_zoekt_orphaned_repo_event
- 1
- - search_zoekt_project_transfer
- 1
- - search_zoekt_repo_marked_as_to_delete_event
- 1
- - secrets_management_provision_project_secrets_manager
- 1
- - security_create_security_policy_project

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddDurationToCiFinishedPipelinesHourly < ClickHouse::Migration
def up
execute <<~SQL
ALTER TABLE ci_finished_pipelines_hourly
ADD COLUMN IF NOT EXISTS duration_quantile AggregateFunction(quantile, UInt64)
SQL
end
def down
execute <<~SQL
ALTER TABLE ci_finished_pipelines_hourly
DROP COLUMN IF EXISTS duration_quantile
SQL
end
end

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
class AddDurationToCiFinishedPipelinesHourlyMv < ClickHouse::Migration
def up
execute <<~SQL
ALTER TABLE ci_finished_pipelines_hourly_mv
MODIFY QUERY
SELECT
path,
status,
source,
ref,
toStartOfInterval(started_at, INTERVAL 1 hour) AS started_at_bucket,
countState() AS count_pipelines,
quantileState(duration) AS duration_quantile
FROM ci_finished_pipelines
GROUP BY path, status, source, ref, started_at_bucket
SQL
end
def down
execute <<~SQL
ALTER TABLE ci_finished_pipelines_hourly_mv
MODIFY QUERY
SELECT
path,
status,
source,
ref,
toStartOfInterval(started_at, INTERVAL 1 hour) AS started_at_bucket,
countState() AS count_pipelines
FROM ci_finished_pipelines
GROUP BY path, status, source, ref, started_at_bucket
SQL
end
end
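
Editor's note on the two ClickHouse migrations above: an AggregateFunction(quantile, UInt64) column stores a partial aggregation state rather than a plain value, so reads have to finalize it with the matching -Merge combinator. A minimal sketch of such a read follows, written as a Ruby constant holding the SQL; the 0.95 level, the 7-day window, and the Ruby wrapper are illustrative assumptions, not part of this commit.

# Illustrative sketch only: how the new duration_quantile column could be read.
# quantileMerge finalizes the stored aggregate states; 0.95 is an example level.
P95_PIPELINE_DURATION_SQL = <<~SQL
  SELECT
    started_at_bucket,
    countMerge(count_pipelines)             AS pipelines,
    quantileMerge(0.95)(duration_quantile)  AS p95_duration
  FROM ci_finished_pipelines_hourly
  WHERE started_at_bucket >= now() - INTERVAL 7 DAY
  GROUP BY started_at_bucket
  ORDER BY started_at_bucket
SQL
puts P95_PIPELINE_DURATION_SQL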

View File

@ -0,0 +1,10 @@
---
migration_job_name: DeleteOrphanedBuildRecords
description: Delete records from p_ci_builds that have commit_id with no corresponding record at p_ci_pipelines.id
feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165727
milestone: '17.4'
queued_migration_version: 20240910154923
# Replace with the approximate date you think it's best to ensure the completion of this BBM.
finalize_after: '2024-09-17'
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,10 @@
---
migration_job_name: PopulateDetumbledEmailInEmails
description: Populate the detumbled_email column in the emails table
feature_category: user_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162924
milestone: '17.4'
queued_migration_version: 20240813190312
# Replace with the approximate date you think it's best to ensure the completion of this BBM.
finalize_after: '2024-09-15'
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddNullProjectIdIndexToZoektRepos < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.4'
INDEX_NAME = "index_zoekt_repos_with_missing_project_id"
def up
add_concurrent_index :zoekt_repositories, :project_id, name: INDEX_NAME, where: "project_id IS NULL"
end
def down
remove_concurrent_index_by_name(:zoekt_repositories, name: INDEX_NAME)
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class QueuePopulateDetumbledEmailInEmails < Gitlab::Database::Migration[2.2]
milestone '17.4'
restrict_gitlab_migration gitlab_schema: :gitlab_main
MIGRATION = "PopulateDetumbledEmailInEmails"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 10_000
SUB_BATCH_SIZE = 500
def up
queue_batched_background_migration(
MIGRATION,
:emails,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :emails, :id, [])
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
class TruncateCiFinishedBuildChSyncEventsIfClickHouseNotConfigured < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.4'
def up
truncate_tables!('p_ci_finished_build_ch_sync_events') unless ::Gitlab::ClickHouse.configured?
end
def down
# no-op
end
end

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
class QueueDeleteOrphanedBuildRecords < Gitlab::Database::Migration[2.2]
milestone '17.4'
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = 'DeleteOrphanedBuildRecords'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 5000
SUB_BATCH_SIZE = 1000
MIN_PIPELINE_ID = 1104078878
def up
queue_batched_background_migration(
MIGRATION,
:p_ci_builds,
:commit_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
batch_class_name: 'LooseIndexScanBatchingStrategy',
sub_batch_size: SUB_BATCH_SIZE,
batch_min_value: batch_min_value
)
end
def down
delete_batched_background_migration(MIGRATION, :p_ci_builds, :commit_id, [])
end
def batch_min_value
if Gitlab.com_except_jh?
MIN_PIPELINE_ID
else
1
end
end
end

View File

@ -0,0 +1 @@
352c0cf1c8745c07b6e4fe62639615d16fa6d910ae8ae1bc4c34c2eea03911d6

View File

@ -0,0 +1 @@
07fc42850a68e85f82ed291cf268572813d204211d90b3fadc82bca33544e40e

View File

@ -0,0 +1 @@
dad7fc045341f9c756470a8caf4f913a57fb07b7059d7686da1f28cf40cf3e26

View File

@ -0,0 +1 @@
bccaedfd8d93e34c78f369e80904563e4af5bc44bae61bf68c2e21c27d7d5a09

View File

@ -30966,6 +30966,8 @@ CREATE INDEX index_zoekt_replicas_on_namespace_id_enabled_namespace_id ON zoekt_
CREATE INDEX index_zoekt_replicas_on_state ON zoekt_replicas USING btree (state);
CREATE INDEX index_zoekt_repos_with_missing_project_id ON zoekt_repositories USING btree (project_id) WHERE (project_id IS NULL);
CREATE INDEX index_zoekt_repositories_on_project_id ON zoekt_repositories USING btree (project_id);
CREATE INDEX index_zoekt_repositories_on_state ON zoekt_repositories USING btree (state);

View File

@ -52,7 +52,7 @@ In certain circumstances, GitLab might run in a
### PostgreSQL
PostgreSQL is the only supported database and is bundled with the Linux package.
[PostgreSQL](https://www.postgresql.org/) is the only supported database and is bundled with the Linux package.
You can also use an [external PostgreSQL database](https://docs.gitlab.com/omnibus/settings/database.html#using-a-non-packaged-postgresql-database-management-server).
Depending on the [number of users](../administration/reference_architectures/index.md),
@ -210,21 +210,27 @@ The recommended number of threads is dependent on several factors, including tot
## Redis
Redis stores all user sessions and the background task queue.
[Redis](https://redis.io/) stores all user sessions and background tasks
and requires about 25 kB per user on average.
The requirements for Redis are as follows:
In GitLab 16.0 and later, Redis 6.x or 7.x is required.
For more information about end-of-life dates, see the
[Redis documentation](https://redis.io/docs/latest/operate/rs/installing-upgrading/product-lifecycle/).
- Redis 6.x or 7.x is required in GitLab 16.0 and later. However, you should upgrade to
Redis 6.2.14 or later as [Redis 6.0 is no longer supported](https://endoflife.date/redis).
- Redis Cluster mode is not supported. Redis Standalone must be used, with or without HA.
- Storage requirements for Redis are minimal, about 25 kB per user on average.
- [Redis eviction mode](../administration/redis/replication_and_failover_external.md#setting-the-eviction-policy) set appropriately.
For Redis:
- Use a standalone instance (with or without high availability).
Redis Cluster is not supported.
- Set the [eviction policy](../administration/redis/replication_and_failover_external.md#setting-the-eviction-policy) as appropriate.
## Sidekiq
Sidekiq processes the background jobs with a multi-threaded process.
This process starts with the entire Rails stack (200 MB+) but it can grow over time due to memory leaks.
On a very active server (10,000 billable users) the Sidekiq process can use 1 GB+ of memory.
[Sidekiq](https://sidekiq.org/) uses a multi-threaded process for background jobs.
This process initially consumes more than 200 MB of memory
and might grow over time due to memory leaks.
On a very active server with more than 10,000 billable users,
the Sidekiq process might consume more than 1 GB of memory.
## Prometheus

View File

@ -479,11 +479,11 @@ Audit event types belong to the following product categories.
| Name | Description | Saved to database | Streamed | Introduced in | Scope |
|:------------|:------------|:------------------|:---------|:--------------|:--------------|
| [`self_hosted_model_created`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165303) | A new self-hosted model configuration was added | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/477999) | Instance, User |
| [`self_hosted_model_destroyed`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165321) | A new self-hosted model configuration was destroyed | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/477999) | Instance, User |
| [`self_hosted_model_feature_changed`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165489) | A self-hosted model feature had its configuration changed | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/463215) | Project |
| [`self_hosted_model_terms_accepted`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165480) | Terms for usage of self-hosted models were accepted | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/477999) | Instance, User |
| [`self_hosted_model_updated`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165520) | A self-hosted model configuration was updated | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/483295) | Instance, User |
| [`self_hosted_model_created`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165303) | A new self-hosted model configuration was added | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/477999) | Instance |
| [`self_hosted_model_destroyed`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165321) | A new self-hosted model configuration was destroyed | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/477999) | Instance |
| [`self_hosted_model_feature_changed`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165489) | A self-hosted model feature had its configuration changed | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/463215) | Instance |
| [`self_hosted_model_terms_accepted`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165480) | Terms for usage of self-hosted models were accepted | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/477999) | Instance |
| [`self_hosted_model_updated`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165520) | A self-hosted model configuration was updated | **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.4](https://gitlab.com/gitlab-org/gitlab/-/issues/483295) | Instance |
### Source code management

View File

@ -290,6 +290,9 @@ When a user tries to sign in with Group SSO, GitLab attempts to find or create a
> - **Remember me** checkbox [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/121569) in GitLab 15.7.
NOTE:
If the user is an [enterprise user](../../enterprise_user/index.md) of that group, the following steps do not apply. The enterprise user must instead [sign in with a SAML account that has the same email as the GitLab account](#returning-users-automatic-identity-relinking). This allows GitLab to link the SAML account to the existing account.
To link SAML to your existing GitLab.com account:
1. Sign in to your GitLab.com account. [Reset your password](https://gitlab.com/users/password/new)
@ -425,7 +428,7 @@ automatically confirms user accounts. Users still receive an
Prerequisites:
- You must have the Owner role for the group that the enterprise user belongs to.
- The group SSO must be enabled.
- Group SSO must be enabled.
You can disable password authentication for the group's [enterprise users](../../enterprise_user/index.md).
This stops enterprise users from using their username and password to authenticate.
@ -443,6 +446,11 @@ To disable password authentication for enterprise users:
1. Under **Configuration**, select **Disable password authentication for enterprise users**.
1. Select **Save changes**.
#### Returning users (Automatic Identity Relinking)
If an enterprise user is removed from the group and then returns, they can sign in with their enterprise SSO account.
As long as the user's email address in the identity provider remains the same as the email address on the existing GitLab account, the SSO identity is automatically linked to the account and the user can sign in without any issues.
### Block user access
To rescind a user's access to the group when only SAML SSO is configured, either:

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class DeleteOrphanedBuildRecords < BatchedMigrationJob
operation_name :delete_orphaned_build_records
feature_category :continuous_integration
class CiPipeline < ::Ci::ApplicationRecord
self.table_name = :p_ci_pipelines
self.primary_key = :id
end
def perform
distinct_each_batch do |batch|
pipeline_ids = batch.pluck(batch_column)
pipelines_query = CiPipeline
.where('p_ci_builds.commit_id = p_ci_pipelines.id')
.where('p_ci_builds.partition_id = p_ci_pipelines.partition_id')
.select(1)
base_relation
.where(batch_column => pipeline_ids)
.where('NOT EXISTS (?)', pipelines_query)
.delete_all
end
end
private
def base_relation
define_batchable_model(batch_table, connection: connection, primary_key: :id)
.where(batch_column => start_id..end_id)
end
end
end
end
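
Editor's note: roughly speaking, each batch in the job above turns into an anti-join delete. The sketch below approximates the SQL ActiveRecord builds for one batch; the literal id list and the omission of the start_id..end_id range condition from base_relation are simplifications, not a captured query.

# Illustrative approximation only: the shape of the DELETE issued per batch.
# Real batches also carry the start_id..end_id range condition from base_relation.
ORPHANED_BUILDS_DELETE_SQL = <<~SQL
  DELETE FROM p_ci_builds
  WHERE commit_id IN (101, 102, 103) -- pipeline ids plucked for this batch
    AND NOT EXISTS (
      SELECT 1
      FROM p_ci_pipelines
      WHERE p_ci_builds.commit_id = p_ci_pipelines.id
        AND p_ci_builds.partition_id = p_ci_pipelines.partition_id
    )
SQL
puts ORPHANED_BUILDS_DELETE_SQL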

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class PopulateDetumbledEmailInEmails < BatchedMigrationJob
scope_to ->(relation) { relation.where(detumbled_email: nil) }
operation_name :populate_detumbled_email
feature_category :user_management
EMAIL_REGEXP = /\A[^@\s]+@[^@\s]+\z/
def perform
each_sub_batch do |sub_batch|
sub_batch.each do |email|
email.update!(detumbled_email: normalize_email(email.email))
end
end
end
private
# Method copied from lib/gitlab/utils/email.rb
def normalize_email(email)
return email unless email.is_a?(String)
return email unless EMAIL_REGEXP.match?(email.strip)
portions = email.downcase.strip.split('@')
mailbox = portions.shift
domain = portions.join
mailbox_root = mailbox.split('+')[0]
# Gmail addresses strip the "." from their emails.
# For example, user.name@gmail.com is the same as username@gmail.com
mailbox_root = mailbox_root.tr('.', '') if domain == 'gmail.com'
[mailbox_root, domain].join('@')
end
end
end
end
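
Editor's note: because normalize_email above is a copy of lib/gitlab/utils/email.rb rather than a call into it, here is a tiny standalone sketch of the same detumbling rules with example inputs and outputs; the method body is re-typed for illustration only.

# Illustrative re-statement of the detumbling rules used by the migration.
def detumble(email)
  return email unless email.is_a?(String) && /\A[^@\s]+@[^@\s]+\z/.match?(email.strip)

  mailbox, domain = email.downcase.strip.split('@', 2)
  root = mailbox.split('+').first
  root = root.tr('.', '') if domain == 'gmail.com' # Gmail ignores dots in the mailbox
  "#{root}@#{domain}"
end

puts detumble('User.Name+spam@gmail.com')   # => username@gmail.com
puts detumble('user.name+spam@example.com') # => user.name@example.com (dots kept)
puts detumble('not-an-email')               # => not-an-email (returned unchanged)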

View File

@ -13,6 +13,10 @@ variables:
DEFAULT_SAST_EXCLUDED_PATHS: "spec, test, tests, tmp"
SAST_EXCLUDED_PATHS: "$DEFAULT_SAST_EXCLUDED_PATHS"
SCAN_KUBERNETES_MANIFESTS: "false"
GITLAB_ADVANCED_SAST_SCA_FILENAME: "GLAS_SCA.json"
GITLAB_ADVANCED_SAST_SCA_TEMP_OUTPUT_FILENAME: "sca.json"
GLAS_STATIC_REACHABILITY_MATCHER_VERSION: "v0.0.3"
GITLAB_CDX_TO_REACHABILITY_CALCULATOR_VERSION: "v1.1.0"
sast:
stage: test
@ -93,6 +97,89 @@ gitlab-advanced-sast:
- '**/*.mjs'
- '**/*.cs'
.static-reachability-rules:
rules:
- if: $SAST_DISABLED == 'true' || $SAST_DISABLED == '1'
when: never
- if: $SAST_EXCLUDED_ANALYZERS =~ /gitlab-advanced-sast/
when: never
- if: $STATIC_REACHABILITY_ENABLED != 'true'
when: never
# Add the job to merge request pipelines if there's an open merge request.
- if: $CI_PIPELINE_SOURCE == "merge_request_event" &&
$GITLAB_FEATURES =~ /\bsast_advanced\b/
exists:
- '**/*.py'
- if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline.
when: never
# If there's no open merge request, add it to a *branch* pipeline instead.
- if: $CI_COMMIT_BRANCH &&
$GITLAB_FEATURES =~ /\bsast_advanced\b/
exists:
- '**/*.py'
gitlab-static-reachability:
extends:
- gitlab-advanced-sast
variables:
SAST_SCANNER_ALLOWED_CLI_OPTS: --sca-output-path ${GITLAB_ADVANCED_SAST_SCA_FILENAME}
before_script:
- |
echo keep-builtin-rules: false >> /lightz-aio_default_config.yaml
artifacts:
paths:
- $GITLAB_ADVANCED_SAST_SCA_FILENAME
rules:
- if: $SAST_DISABLED == 'true' || $SAST_DISABLED == '1'
when: never
- if: $SAST_EXCLUDED_ANALYZERS =~ /gitlab-advanced-sast/
when: never
- if: $STATIC_REACHABILITY_ENABLED != 'true'
when: never
# Add the job to merge request pipelines if there's an open merge request.
- if: $CI_PIPELINE_SOURCE == "merge_request_event" &&
$GITLAB_FEATURES =~ /\bsast_advanced\b/
exists:
- '**/*.py'
- if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline.
when: never
# If there's no open merge request, add it to a *branch* pipeline instead.
- if: $CI_COMMIT_BRANCH &&
$GITLAB_FEATURES =~ /\bsast_advanced\b/
exists:
- '**/*.py'
enrich-cdx-results:
stage: .post
extends: .static-reachability-rules
variables:
LIGHTZ_PATH: $GITLAB_ADVANCED_SAST_SCA_FILENAME
RESULTS_PATH: $GITLAB_ADVANCED_SAST_SCA_TEMP_OUTPUT_FILENAME
SCA_TO_SARIF_PROJECT_ID: 60962090
SBOM_REACHABILITY_CALCULATOR_PROJECT_ID: 60621659
DEPENDENCY_SCANNING_PATTERN: "**/gl-sbom-*.cdx.json"
GLAS_REPORT: $GITLAB_ADVANCED_SAST_SCA_TEMP_OUTPUT_FILENAME
image:
name: "$CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG"
entrypoint: [""]
before_script:
- |
wget "gitlab.com/api/v4/projects/${SCA_TO_SARIF_PROJECT_ID}/packages/generic/sca-to-sarif-matcher/${GLAS_STATIC_REACHABILITY_MATCHER_VERSION}/matcher" \
-O /matcher
- chmod +x /matcher
- |
wget "gitlab.com/api/v4/projects/${SBOM_REACHABILITY_CALCULATOR_PROJECT_ID}/packages/generic/sca-to-sarif-reachability_calculator/${GITLAB_CDX_TO_REACHABILITY_CALCULATOR_VERSION}/reachability_calculator" \
-O /calculator
- chmod +x /calculator
script:
- /matcher
- /calculator process
artifacts:
paths:
- "**/gl-sbom-*.cdx.json"
reports:
cyclonedx: "**/gl-sbom-*.cdx.json"
bandit-sast:
extends: .sast-analyzer
script:

View File

@ -0,0 +1,145 @@
# frozen_string_literal: true
module Gitlab
module Diff
class MergeRequestSuggestion
include Gitlab::Utils::StrongMemoize
TargetLineNotFound = Class.new(StandardError)
SUGGESTION_HEADER = "```suggestion:"
SUGGESTION_FOOTER = "```"
def initialize(diff, path, merge_request)
@diff = diff
@path = path
@merge_request = merge_request
@project = merge_request.project
end
def note_attributes_hash
{
position: position,
note: suggestion,
type: "DiffNote",
noteable_type: MergeRequest,
noteable_id: @merge_request.id
}
end
private
def diff_lines
parsed_lines = Gitlab::Diff::Parser.new.parse(@diff.lines)
lines = []
parsed_lines.each_with_index do |line, index|
next if line.text.start_with?("diff --git") && index == 0
next if line.type == 'match'
lines << line
end
lines
end
strong_memoize_attr :diff_lines
def suggestion_start_line
diff_lines.first.old_pos
end
def suggestion_last_removed_line
diff_lines.reverse.find(&:removed?).old_pos
end
strong_memoize_attr :suggestion_last_removed_line
def suggestion_line_count
# We subtract the `suggestion_start_line` from `suggestion_last_removed_line` since we'll be
# creating the diff note on the merge_request diff line corresponding to the `suggestion_last_removed_line`.
# This is to ensure that the suggestion will only replace the lines that
# also exist in the supplied diff patch.
suggestion_last_removed_line - suggestion_start_line
end
def suggestion_last_line
diff_lines.last.old_pos
end
def suggestion_last_added_line
diff_lines.reverse.find(&:added?).new_pos
end
def remainder_suggestion_line_count
# We subtract the position of last added line from the last line in
# supplied diff patch so we can get the rest of the lines that will need
# to be replaced by the suggestion.
#
# This is needed so we can include the lines that need to be replaced
# below the line the diff note with suggestion is being posted on.
suggestion_last_line - suggestion_last_added_line
end
def suggestion_target_line
# We use the `suggestion_last_removed_line` as the line where we will create the note
# so the suggestion will show right on the last line that the suggestion will
# replace. This allows us to show the diff of the lines going to be replaced
# in the `Overview` tab.
#
# We get the `suggestion_last_removed_line` and find the corresponding line in
# the merge request diff of the specific file being suggested on.
#
# This is to ensure we can create the note on the correct line in the merge_request diff.
raise TargetLineNotFound if merge_request_diff_file.nil?
merge_request_diff_file.diff_lines.find { |line| line.new_line == suggestion_last_removed_line }
end
strong_memoize_attr :suggestion_target_line
def suggestion_meta
"-#{suggestion_line_count}+#{remainder_suggestion_line_count}"
end
def suggestion
array = [SUGGESTION_HEADER + suggestion_meta]
diff_lines.each do |line|
array << line.text(prefix: false) if line.added? || line.unchanged?
end
array << SUGGESTION_FOOTER
array.join("\n")
end
def latest_merge_request_diff
@merge_request.latest_merge_request_diff
end
strong_memoize_attr :latest_merge_request_diff
def position
{
position_type: "text",
old_path: @path,
new_path: @path,
base_sha: latest_merge_request_diff.base_commit_sha,
head_sha: latest_merge_request_diff.head_commit_sha,
start_sha: latest_merge_request_diff.start_commit_sha,
old_line: (suggestion_target_line&.old_pos unless suggestion_target_line.added?),
new_line: (suggestion_target_line&.new_pos unless suggestion_target_line.removed?),
ignore_whitespace_change: false
}
end
def merge_request_diff_file
diff_options = {
paths: [@path],
expanded: true,
include_stats: false,
ignore_whitespace_change: false
}
@merge_request.diffs(diff_options).diff_files.first
end
strong_memoize_attr :merge_request_diff_file
end
end
end
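
Editor's note on the multi-line suggestion syntax that #suggestion assembles: in a GitLab diff comment, a fence opening with suggestion:-N+M replaces the commented line together with N lines above and M lines below it with the fenced body. A hand-written sketch of such a note body follows; the concrete lines and offsets are illustrative, not output of this class.

# Illustrative only: the kind of Markdown note body this class produces.
# "-2+0" widens the replacement to cover the two lines above the commented line.
example_note = <<~MARKDOWN
  ```suggestion:-2+0
  sanitized_filename = File.basename(params['filename'].to_s)
  file_path = File.expand_path(File.join("./files", sanitized_filename))
  ```
MARKDOWN
puts example_note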

View File

@ -20086,9 +20086,18 @@ msgstr ""
msgid "DuoChat|How to use GitLab"
msgstr ""
msgid "DuoChat|Included references"
msgstr ""
msgid "DuoChat|No results found"
msgstr ""
msgid "DuoChat|Reset conversation and ignore the previous messages."
msgstr ""
msgid "DuoChat|Search %{categoryLabel}..."
msgstr ""
msgid "DuoChat|The issue, epic, or code you're viewing"
msgstr ""
@ -24633,6 +24642,36 @@ msgstr ""
msgid "GitLab.com import"
msgstr ""
msgid "GitLabDuo|Be specific and include any requirements."
msgstr ""
msgid "GitLabDuo|Cancel"
msgstr ""
msgid "GitLabDuo|Collapse"
msgstr ""
msgid "GitLabDuo|Expand"
msgstr ""
msgid "GitLabDuo|Generate plan"
msgstr ""
msgid "GitLabDuo|Goal"
msgstr ""
msgid "GitLabDuo|Image"
msgstr ""
msgid "GitLabDuo|It should have any tools necessary for the workflow installed."
msgstr ""
msgid "GitLabDuo|The container image to run the workflow in."
msgstr ""
msgid "GitLabDuo|What would you like to do and how."
msgstr ""
msgid "GitLabPagesDomains|Retry"
msgstr ""
@ -51282,6 +51321,9 @@ msgstr ""
msgid "Show more"
msgstr ""
msgid "Show more breadcrumbs"
msgstr ""
msgid "Show one file at a time"
msgstr ""
@ -55662,6 +55704,9 @@ msgstr ""
msgid "This issue is in a child epic of the filtered epic"
msgstr ""
msgid "This item is disabled"
msgstr ""
msgid "This job could not start because it could not retrieve the needed artifacts%{punctuation}%{invalid_dependencies}"
msgstr ""
@ -61616,6 +61661,11 @@ msgstr ""
msgid "WorkItem|%{workItemType} deleted"
msgstr ""
msgid "WorkItem|%{workItemType} has 1 linked item"
msgid_plural "WorkItem|%{workItemType} has %{itemCount} linked items"
msgstr[0] ""
msgstr[1] ""
msgid "WorkItem|%{workItemType} is blocked by 1 item"
msgid_plural "WorkItem|%{workItemType} is blocked by %{itemCount} items"
msgstr[0] ""

View File

@ -0,0 +1,73 @@
#!/usr/bin/env node
import { ESLint } from 'eslint';
const RULE_REQUIRE_VALID_HELP_PAGE_PATH = 'local-rules/require-valid-help-page-path';
const RULE_VUE_REQUIRE_VALID_HELP_PAGE_LINK_COMPONENT =
'local-rules/vue-require-valid-help-page-link-component';
const RULES = [RULE_REQUIRE_VALID_HELP_PAGE_PATH, RULE_VUE_REQUIRE_VALID_HELP_PAGE_LINK_COMPONENT];
function createESLintInstance(overrideConfig) {
return new ESLint({ useEslintrc: false, overrideConfig, fix: false });
}
function lint(eslint, filePaths) {
return eslint.lintFiles(filePaths);
}
function outputLintingResults(results) {
const outdatedLinksErrors = results.reduce((acc, result) => {
const errors = result.messages.filter((message) => RULES.includes(message.ruleId));
if (errors.length) {
acc.push({
...result,
messages: errors,
errorCount: errors.length,
suppressedMessages: [],
fatalErrorCount: 0,
warningCount: 0,
fixableErrorCount: 0,
fixableWarningCount: 0,
});
}
return acc;
}, []);
return outdatedLinksErrors;
}
async function lintFiles(filePaths) {
console.log(
`Running ESLint with the following rules enabled:${RULES.map((rule) => `\n* ${rule}`).join('')}`,
);
const overrideConfig = {
env: {
browser: true,
es2020: true,
},
parserOptions: {
parser: 'espree',
ecmaVersion: 'latest',
sourceType: 'module',
},
extends: ['plugin:vue/recommended'],
plugins: ['local-rules'],
rules: {
[RULE_REQUIRE_VALID_HELP_PAGE_PATH]: 'error',
[RULE_VUE_REQUIRE_VALID_HELP_PAGE_LINK_COMPONENT]: 'error',
},
};
const eslint = createESLintInstance(overrideConfig);
const results = await lint(eslint, filePaths);
const formatter = await eslint.loadFormatter();
const errors = outputLintingResults(results);
if (errors.length > 0) {
console.log(formatter.format(errors));
process.exitCode = 1;
} else {
console.log('No issues found!');
}
}
lintFiles(['./{,ee/}app/assets/javascripts/**/*{.js,.vue}']);

View File

@ -0,0 +1,18 @@
diff --git a/cwe-23.rb b/cwe-23.rb
--- a/cwe-23.rb
+++ b/cwe-23.rb
@@ -2,9 +2,10 @@
get '/file/:filename' do
- filename = params['filename']
- file_path = File.join("./files", filename)
+ sanitized_filename = File.basename(params['filename'].to_s)
+ file_path = File.expand_path(File.join("./files", sanitized_filename))
+ base_path = File.expand_path("./files")
- if File.exist?(file_path) && file_path.start_with?("./files/")
- send_file(file_path)
+ if File.exist?(file_path) && file_path.start_with?(base_path)
+ send_file(file_path, disposition: 'attachment', filename: sanitized_filename)
else
halt 404, "File not found"

View File

@ -0,0 +1,11 @@
@@ -2,9 +2,7 @@ require 'sinatra'
get '/file/:filename' do
filename = params['filename']
- # Sanitize the filename to prevent path traversal
- safe_filename = File.basename(filename)
- file_path = File.join("./files", safe_filename)
+ file_path = File.join("./files", filename)
if File.exist?(file_path) && file_path.start_with?("./files/")
send_file(file_path)

View File

@ -0,0 +1,12 @@
```suggestion:-6+1
get '/file/:filename' do
sanitized_filename = File.basename(params['filename'].to_s)
file_path = File.expand_path(File.join("./files", sanitized_filename))
base_path = File.expand_path("./files")
if File.exist?(file_path) && file_path.start_with?(base_path)
send_file(file_path, disposition: 'attachment', filename: sanitized_filename)
else
halt 404, "File not found"
```

View File

@ -80,6 +80,7 @@ describe('View branch rules', () => {
const createComponent = async ({
glFeatures = { editBranchRules: true },
canAdminProtectedBranches = true,
branchRulesQueryHandler = branchRulesMockRequestHandler,
deleteMutationHandler = deleteBranchRuleSuccessHandler,
editMutationHandler = editBranchRuleSuccessHandler,
@ -98,6 +99,7 @@ describe('View branch rules', () => {
protectedBranchesPath,
branchRulesPath,
glFeatures,
canAdminProtectedBranches,
},
stubs: {
Protection,
@ -244,6 +246,13 @@ describe('View branch rules', () => {
});
describe('Editing branch rule', () => {
describe('when canAdminProtectedBranches is false', () => {
it('does not render edit rule button', () => {
createComponent({ canAdminProtectedBranches: false });
expect(findEditRuleNameButton().exists()).toBe(false);
});
});
beforeEach(async () => {
await createComponent();
});
@ -324,6 +333,13 @@ describe('View branch rules', () => {
});
describe('Deleting branch rule', () => {
describe('when canAdminProtectedBranches is false', () => {
it('does not render delete rule button', () => {
createComponent({ canAdminProtectedBranches: false });
expect(findDeleteRuleButton().exists()).toBe(false);
});
});
it('renders delete rule button', () => {
expect(findDeleteRuleButton().text()).toBe('Delete rule');
});

View File

@ -633,7 +633,7 @@ describe('IssuableItem', () => {
window.open = jest.fn();
});
it('emits an event on row click', async () => {
const { iid, webUrl } = mockIssuable;
const { iid, webUrl, type: workItemType } = mockIssuable;
wrapper = createComponent({
preventRedirect: true,
@ -642,11 +642,11 @@ describe('IssuableItem', () => {
await findIssuableItemWrapper().trigger('click');
expect(wrapper.emitted('select-issuable')).toEqual([[{ iid, webUrl }]]);
expect(wrapper.emitted('select-issuable')).toEqual([[{ iid, webUrl, workItemType }]]);
});
it('includes fullPath in emitted event for work items', async () => {
const { iid, webUrl } = mockIssuable;
const { iid, webUrl, type: workItemType } = mockIssuable;
const fullPath = 'gitlab-org/gitlab';
wrapper = createComponent({
@ -657,7 +657,9 @@ describe('IssuableItem', () => {
await findIssuableItemWrapper().trigger('click');
expect(wrapper.emitted('select-issuable')).toEqual([[{ iid, webUrl, fullPath }]]);
expect(wrapper.emitted('select-issuable')).toEqual([
[{ iid, webUrl, fullPath, workItemType }],
]);
});
it('does not apply highlighted class when item is not active', () => {

View File

@ -20,6 +20,8 @@ import {
removeLinkedWorkItemResponse,
workItemLinkedItemsResponse,
workItemEmptyLinkedItemsResponse,
workItemSingleLinkedItemResponse,
mockLinkedItems,
} from '../../mock_data';
describe('WorkItemRelationships', () => {
@ -74,7 +76,7 @@ describe('WorkItemRelationships', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findErrorMessage = () => wrapper.findComponent(GlAlert);
const findEmptyRelatedMessageContainer = () => wrapper.findByTestId('crud-empty');
const findLinkedItemsCountContainer = () => wrapper.findByTestId('crud-count');
const findLinkedItemsCountBadge = () => wrapper.findByTestId('linked-items-count-bage');
const findLinkedItemsHelpLink = () => wrapper.findByTestId('help-link');
const findAllWorkItemRelationshipListComponents = () =>
wrapper.findAllComponents(WorkItemRelationshipList);
@ -115,7 +117,7 @@ describe('WorkItemRelationships', () => {
// renders all 3 lists: blocking, blocked by and related to
expect(findAllWorkItemRelationshipListComponents().length).toBe(3);
expect(findLinkedItemsCountContainer().text()).toBe('3');
expect(findLinkedItemsCountBadge().text()).toBe('3');
});
it('shows an alert when list loading fails', async () => {
@ -146,7 +148,7 @@ describe('WorkItemRelationships', () => {
it('removes linked item and shows toast message when removeLinkedItem event is emitted', async () => {
await createComponent();
expect(findLinkedItemsCountContainer().text()).toBe('3');
expect(findLinkedItemsCountBadge().text()).toBe('3');
await findAllWorkItemRelationshipListComponents()
.at(0)
@ -163,7 +165,7 @@ describe('WorkItemRelationships', () => {
expect($toast.show).toHaveBeenCalledWith('Linked item removed');
expect(findLinkedItemsCountContainer().text()).toBe('2');
expect(findLinkedItemsCountBadge().text()).toBe('2');
});
it.each`
@ -238,5 +240,20 @@ describe('WorkItemRelationships', () => {
it('calls getShowLabelsFromLocalStorage on mount', () => {
expect(utils.getShowLabelsFromLocalStorage).toHaveBeenCalled();
});
it.each`
ariaLabel | linkedItemsResponse
${`Task has ${mockLinkedItems.linkedItems.nodes.length} linked items`} | ${workItemLinkedItemsResponse}
${'Task has 1 linked item'} | ${workItemSingleLinkedItemResponse}
`(
'renders the correct aria labels for the badge count',
async ({ ariaLabel, linkedItemsResponse }) => {
await createComponent({
workItemLinkedItemsHandler: jest.fn().mockResolvedValue(linkedItemsResponse),
});
expect(findLinkedItemsCountBadge().attributes('aria-label')).toBe(ariaLabel);
},
);
});
});

View File

@ -802,6 +802,54 @@ export const workItemEmptyLinkedItemsResponse = {
},
};
export const workItemSingleLinkedItemResponse = {
data: {
workspace: {
__typename: 'Namespace',
id: 'gid://gitlab/Group/1',
workItem: {
id: 'gid://gitlab/WorkItem/2',
widgets: [
{
type: WIDGET_TYPE_LINKED_ITEMS,
linkedItems: {
nodes: [
{
linkId: 'gid://gitlab/WorkItems::RelatedWorkItemLink/8',
linkType: 'is_blocked_by',
workItem: {
id: 'gid://gitlab/WorkItem/675',
iid: '83',
confidential: true,
workItemType: {
id: 'gid://gitlab/WorkItems::Type/5',
name: 'Task',
iconName: 'issue-type-task',
__typename: 'WorkItemType',
},
reference: 'test-project-path#1',
title: 'Task 1201',
state: 'OPEN',
createdAt: '2023-03-28T10:50:16Z',
closedAt: null,
webUrl: '/gitlab-org/gitlab-test/-/work_items/83',
widgets: [],
__typename: 'WorkItem',
},
__typename: 'LinkedWorkItemType',
},
],
__typename: 'LinkedWorkItemTypeConnection',
},
__typename: 'WorkItemWidgetLinkedItems',
},
],
__typename: 'WorkItem',
},
},
},
};
export const workItemBlockedByLinkedItemsResponse = {
data: {
workspace: {

View File

@ -0,0 +1,59 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedBuildRecords, feature_category: :continuous_integration, migration: :gitlab_ci do
let(:pipelines_table) { table(:p_ci_pipelines, database: :ci, primary_key: :id) }
let(:builds_table) { table(:p_ci_builds, database: :ci, primary_key: :id) }
let!(:regular_pipeline) { pipelines_table.create!(project_id: 600, partition_id: 100) }
let!(:deleted_pipeline) { pipelines_table.create!(project_id: 600, partition_id: 100) }
let!(:other_pipeline) { pipelines_table.create!(project_id: 600, partition_id: 100) }
let!(:regular_build) do
builds_table.create!(partition_id: 100, project_id: 600, commit_id: regular_pipeline.id)
end
let!(:orphaned_build) do
builds_table.create!(partition_id: 100, project_id: 600, commit_id: deleted_pipeline.id)
end
let(:connection) { Ci::ApplicationRecord.connection }
around do |example|
connection.transaction do
connection.execute(<<~SQL)
ALTER TABLE ci_pipelines DISABLE TRIGGER ALL;
SQL
example.run
connection.execute(<<~SQL)
ALTER TABLE ci_pipelines ENABLE TRIGGER ALL;
SQL
end
end
describe '#perform' do
subject(:migration) do
described_class.new(
start_id: builds_table.minimum(:commit_id),
end_id: builds_table.maximum(:commit_id),
batch_table: :p_ci_builds,
batch_column: :commit_id,
sub_batch_size: 100,
pause_ms: 0,
connection: connection
)
end
it 'deletes from p_ci_builds where commit_id has no related record at p_ci_pipelines.id', :aggregate_failures do
expect { deleted_pipeline.delete }.to not_change { builds_table.count }
expect { migration.perform }.to change { builds_table.count }.from(2).to(1)
expect(regular_build.reload).to be_persisted
expect { orphaned_build.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end

View File

@ -0,0 +1,43 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateDetumbledEmailInEmails, feature_category: :user_management do
let(:emails) { table(:emails) }
let(:users) { table(:users) }
let!(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
let!(:email1) do
emails.create!(user_id: user.id, email: 'user@gmail.com')
end
let!(:email2) do
emails.create!(user_id: user.id, email: 'user.name+gitlab@gmail.com')
end
let!(:email3) do
emails.create!(user_id: user.id, email: 'user.name@example.com', detumbled_email: 'already_set@example.com')
end
describe '#perform' do
subject(:perform_migration) do
described_class.new(
start_id: emails.first.id,
end_id: emails.last.id,
batch_table: :emails,
batch_column: :id,
sub_batch_size: emails.count,
pause_ms: 0,
connection: ActiveRecord::Base.connection
).perform
end
let(:expected_names) { %w[user@gmail.com username@gmail.com user.name@example.com] }
it 'successfully sets the detumbled_email' do
expect { perform_migration }.to change { email1.reload.detumbled_email }.from(nil).to('user@gmail.com')
.and change { email2.reload.detumbled_email }.from(nil).to('username@gmail.com')
.and not_change { email3.reload.detumbled_email }
end
end
end

View File

@ -0,0 +1,65 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Diff::MergeRequestSuggestion, feature_category: :vulnerability_management do
describe '.note_attributes_hash' do
let_it_be(:fixtures_folder) { Rails.root.join('spec/fixtures/lib/gitlab/diff/merge_request_suggestion') }
let_it_be(:filepath) { 'cwe-23.rb' }
let_it_be(:merge_request_diff) { create(:merge_request_diff) }
let_it_be(:merge_request_diff_file) do
create(:merge_request_diff_file,
merge_request_diff: merge_request_diff,
new_file: false,
a_mode: 100644,
b_mode: 100644,
new_path: filepath,
old_path: filepath,
diff: File.read(File.join(fixtures_folder, 'merge_request.diff'))
)
end
let_it_be(:merge_request) do
create(:merge_request, merge_request_diffs: [merge_request_diff], latest_merge_request_diff: merge_request_diff)
end
let_it_be(:diff) { File.read(File.join(fixtures_folder, 'input.diff')) }
subject(:attributes_hash) { described_class.new(diff, filepath, merge_request).note_attributes_hash }
before do
merge_request.reload
end
context 'when a valid diff is supplied' do
it 'returns a correctly formatted suggestion request payload' do
position_payload = {
position_type: 'text',
old_path: filepath,
new_path: filepath,
base_sha: merge_request.latest_merge_request_diff.base_commit_sha,
head_sha: merge_request.latest_merge_request_diff.head_commit_sha,
start_sha: merge_request.latest_merge_request_diff.start_commit_sha,
old_line: 10,
new_line: 8,
ignore_whitespace_change: false
}
expect(attributes_hash[:type]).to eq('DiffNote')
expect(attributes_hash[:noteable_type]).to eq(MergeRequest)
expect(attributes_hash[:noteable_id]).to eq(merge_request.id)
expect(attributes_hash[:position]).to eq(position_payload)
expect(attributes_hash[:note]).to eq(File.read(File.join(fixtures_folder, 'suggestion.md')))
end
end
context 'when the filepath does not match the diff' do
let_it_be(:filepath) { 'cwe-123.rb' }
it 'raises an error' do
expect { attributes_hash }.to raise_exception(described_class::TargetLineNotFound)
end
end
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueuePopulateDetumbledEmailInEmails, feature_category: :user_management do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :emails,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
}
end
end
end

View File

@ -0,0 +1,53 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueDeleteOrphanedBuildRecords, migration: :gitlab_ci, feature_category: :continuous_integration do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :p_ci_builds,
column_name: :commit_id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_ci
)
}
end
end
context 'when executed on .com' do
before do
allow(Gitlab).to receive(:com_except_jh?).and_return(true)
end
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :p_ci_builds,
column_name: :commit_id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_ci,
batch_min_value: described_class::MIN_PIPELINE_ID
)
}
end
end
end
end

View File

@ -0,0 +1,57 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe TruncateCiFinishedBuildChSyncEventsIfClickHouseNotConfigured,
migration: :gitlab_ci, feature_category: :fleet_visibility do
let(:migration) { described_class.new }
let(:connection) { ::Ci::ApplicationRecord.connection }
let(:table_name) { :p_ci_finished_build_ch_sync_events }
let(:sync_events_table) { table(table_name, primary_key: :build_id, database: :ci) }
let(:partition) { 100 }
before do
# NOTE: p_ci_finished_build_ch_sync_events does not have a default partition attached,
# and the partitioned_table helper does not create one if the :disallow_database_ddl_feature_flags ops FF
# is enabled, so we need to create one here temporarily.
connection.execute <<~SQL
DROP TABLE IF EXISTS #{table_name}_#{partition};
CREATE TABLE #{table_name}_#{partition} PARTITION OF #{table_name}
FOR VALUES IN (#{partition});
SQL
sync_events_table.create!(partition: partition, build_id: 1, build_finished_at: Time.current, project_id: 1)
sync_events_table.create!(partition: partition, build_id: 2, build_finished_at: Time.current)
end
after do
connection.execute <<~SQL
DROP TABLE #{table_name}_#{partition};
SQL
end
context 'when ClickHouse is not configured' do
before do
allow(::Gitlab::ClickHouse).to receive(:configured?).and_return(false)
end
it 'truncates p_ci_finished_build_ch_sync_events table' do
expect { migrate! }.to change { sync_events_table.count }.from(2).to(0)
expect { schema_migrate_down! }.not_to change { sync_events_table.count }.from(0)
end
end
context 'when ClickHouse is configured' do
before do
allow(::Gitlab::ClickHouse).to receive(:configured?).and_return(true)
end
it 'does not truncate p_ci_finished_build_ch_sync_events table' do
expect { migrate! }.not_to change { sync_events_table.count }.from(2)
expect { schema_migrate_down! }.not_to change { sync_events_table.count }.from(2)
end
end
end