Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-05-02 15:12:03 +00:00
parent cd1564edca
commit ea042ca488
64 changed files with 945 additions and 1049 deletions

View File

@ -158,13 +158,11 @@ Rails/Date:
- 'spec/lib/gitlab/bitbucket_import/importers/pull_request_notes_importer_spec.rb'
- 'spec/lib/gitlab/ci/charts_spec.rb'
- 'spec/lib/gitlab/ci/components/instance_path_spec.rb'
- 'spec/lib/gitlab/ci/components/usages/aggregator_spec.rb'
- 'spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb'
- 'spec/lib/gitlab/internal_events_spec.rb'
- 'spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb'
- 'spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb'
- 'spec/mailers/notify_spec.rb'
- 'spec/models/ci/catalog/resources/components/usage_spec.rb'
- 'spec/models/ci/catalog/resources/version_spec.rb'
- 'spec/models/concerns/ignorable_columns_spec.rb'
- 'spec/models/concerns/milestoneish_spec.rb'

View File

@ -76,7 +76,6 @@ Rails/WhereRange:
- 'ee/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_in_batch.rb'
- 'lib/gitlab/background_migration/disable_legacy_open_source_license_for_inactive_public_projects.rb'
- 'lib/gitlab/background_migration/purge_stale_security_scans.rb'
- 'lib/gitlab/ci/components/usages/aggregator.rb'
- 'lib/gitlab/database/background_migration/batched_job.rb'
- 'lib/gitlab/database/reindexing/index_selection.rb'
- 'lib/gitlab/relative_positioning/item_context.rb'

View File

@ -1026,7 +1026,6 @@ RSpec/BeEq:
- 'spec/models/ci/build_dependencies_spec.rb'
- 'spec/models/ci/build_metadata_spec.rb'
- 'spec/models/ci/build_spec.rb'
- 'spec/models/ci/catalog/resources/components/usage_spec.rb'
- 'spec/models/ci/catalog/resources/sync_event_spec.rb'
- 'spec/models/ci/commit_with_pipeline_spec.rb'
- 'spec/models/ci/finished_pipeline_ch_sync_event_spec.rb'

View File

@ -389,7 +389,6 @@ RSpec/ExampleWithoutDescription:
- 'spec/models/ci/build_spec.rb'
- 'spec/models/ci/build_trace_chunk_spec.rb'
- 'spec/models/ci/catalog/resource_spec.rb'
- 'spec/models/ci/catalog/resources/components/usage_spec.rb'
- 'spec/models/ci/catalog/resources/version_spec.rb'
- 'spec/models/ci/catalog/verified_namespace_spec.rb'
- 'spec/models/ci/daily_build_group_report_result_spec.rb'

View File

@ -111,7 +111,6 @@ RSpec/InstanceVariable:
- 'spec/services/notes/post_process_service_spec.rb'
- 'spec/services/notes/update_service_spec.rb'
- 'spec/services/notification_service_spec.rb'
- 'spec/services/projects/create_from_template_service_spec.rb'
- 'spec/services/projects/download_service_spec.rb'
- 'spec/services/upload_service_spec.rb'
- 'spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb'

View File

@ -1 +1 @@
d389b47d972835c6b4f5dbacf07f7965361d0c8f
45fcd3e43ebd45141c506849bfa6dc7f09584476

View File

@ -23,6 +23,7 @@ export async function mountIssuesDashboardApp() {
emptyStateWithoutFilterSvgPath,
hasBlockedIssuesFeature,
hasIssuableHealthStatusFeature,
hasIterationsFeature,
hasIssueDateFilterFeature,
hasIssueWeightsFeature,
hasOkrsFeature,
@ -50,6 +51,7 @@ export async function mountIssuesDashboardApp() {
emptyStateWithoutFilterSvgPath,
hasBlockedIssuesFeature: parseBoolean(hasBlockedIssuesFeature),
hasIssuableHealthStatusFeature: parseBoolean(hasIssuableHealthStatusFeature),
hasIterationsFeature: parseBoolean(hasIterationsFeature),
hasIssueDateFilterFeature: parseBoolean(hasIssueDateFilterFeature),
hasIssueWeightsFeature: parseBoolean(hasIssueWeightsFeature),
hasOkrsFeature: parseBoolean(hasOkrsFeature),

View File

@ -71,7 +71,9 @@ export default {
<template>
<span>
<slot name="weight"></slot>
<issuable-milestone v-if="milestone" :milestone="milestone" />
<slot name="iteration"></slot>
<work-item-attribute
v-if="dueDateText"
anchor-id="issuable-due-date"

View File

@ -1,12 +1,17 @@
import Vue from 'vue';
export const staticBreadcrumbs = Vue.observable({});
import { destroySuperSidebarBreadcrumbs } from '~/super_sidebar/super_sidebar_breadcrumbs';
import { staticBreadcrumbs } from './breadcrumbs_state';
export const injectVueAppBreadcrumbs = (
router,
BreadcrumbsComponent,
apolloProvider = null,
provide = {},
// this is intended to be a temporary option. Once all uses of
// injectVueAppBreadcrumbs use it, the option should be removed and its
// behavior should be the default.
// Cf. https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186906
{ singleNavOptIn = false } = {},
// eslint-disable-next-line max-params
) => {
const injectBreadcrumbEl = document.querySelector('#js-injected-page-breadcrumbs');
@ -15,10 +20,22 @@ export const injectVueAppBreadcrumbs = (
return false;
}
// Hide the last of the static breadcrumbs by nulling its values.
// This way, the separator "/" stays visible and also the new "last" static item isn't displayed in bold font.
staticBreadcrumbs.items[staticBreadcrumbs.items.length - 1].text = '';
staticBreadcrumbs.items[staticBreadcrumbs.items.length - 1].href = '';
if (singleNavOptIn) {
destroySuperSidebarBreadcrumbs();
// After singleNavOptIn is turned on for all Vue apps, we can stop
// changing the content of staticBreadcrumbs and instead pass a mutated
// copy of it to the CustomBreadcrumbsRoot component. For now, we need
// to conditionally mutate the staticBreadcrumbs object so that the last
// breadcrumb is hidden for Vue apps that have not opted in to the
// singleNavOptIn.
// Cf. https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186906
staticBreadcrumbs.items = staticBreadcrumbs.items.slice(0, -1);
} else {
// Hide the last of the static breadcrumbs by nulling its values.
// This way, the separator "/" stays visible and also the new "last" static item isn't displayed in bold font.
staticBreadcrumbs.items[staticBreadcrumbs.items.length - 1].text = '';
staticBreadcrumbs.items[staticBreadcrumbs.items.length - 1].href = '';
}
return new Vue({
el: injectBreadcrumbEl,
@ -29,6 +46,9 @@ export const injectVueAppBreadcrumbs = (
render(createElement) {
return createElement(BreadcrumbsComponent, {
class: injectBreadcrumbEl.className,
props: {
staticBreadcrumbs,
},
});
},
});

View File

@ -0,0 +1,3 @@
import Vue from 'vue';
export const staticBreadcrumbs = Vue.observable({ items: [] });

View File

@ -0,0 +1,35 @@
import Vue from 'vue';
import { GlBreadcrumb } from '@gitlab/ui';
import { staticBreadcrumbs } from '~/lib/utils/breadcrumbs_state';
let superSidebarBreadcrumbsApp = null;
export function initPageBreadcrumbs() {
const el = document.querySelector('#js-vue-page-breadcrumbs');
if (!el) return false;
const { breadcrumbsJson } = el.dataset;
staticBreadcrumbs.items = JSON.parse(breadcrumbsJson);
superSidebarBreadcrumbsApp = new Vue({
el,
name: 'SuperSidebarBreadcrumbs',
destroyed() {
this.$el?.remove();
superSidebarBreadcrumbsApp = null;
},
render(h) {
return h(GlBreadcrumb, {
props: staticBreadcrumbs,
});
},
});
return superSidebarBreadcrumbsApp;
}
export function destroySuperSidebarBreadcrumbs() {
if (superSidebarBreadcrumbsApp) {
superSidebarBreadcrumbsApp.$destroy();
}
}

View File

@ -1,9 +1,8 @@
import Vue from 'vue';
import { GlBreadcrumb, GlToast } from '@gitlab/ui';
import { GlToast } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import { convertObjectPropsToCamelCase, parseBoolean } from '~/lib/utils/common_utils';
import { apolloProvider } from '~/graphql_shared/issuable_client';
import { staticBreadcrumbs } from '~/lib/utils/breadcrumbs';
import { JS_TOGGLE_EXPAND_CLASS, CONTEXT_NAMESPACE_GROUPS } from './constants';
import createStore from './components/global_search/store';
import {
@ -14,6 +13,8 @@ import SuperSidebar from './components/super_sidebar.vue';
import SuperSidebarToggle from './components/super_sidebar_toggle.vue';
import AdvancedSearchModal from './components/global_search/components/global_search_header_app.vue';
export { initPageBreadcrumbs } from './super_sidebar_breadcrumbs';
Vue.use(GlToast);
Vue.use(VueApollo);
@ -197,24 +198,6 @@ export const initSuperSidebarToggle = () => {
});
};
export function initPageBreadcrumbs() {
const el = document.querySelector('#js-vue-page-breadcrumbs');
if (!el) return false;
const { breadcrumbsJson } = el.dataset;
staticBreadcrumbs.items = JSON.parse(breadcrumbsJson);
return new Vue({
el,
name: 'SuperSidebarBreadcrumbs',
render(h) {
return h(GlBreadcrumb, {
props: staticBreadcrumbs,
});
},
});
}
export function initAdvancedSearchModal({
rootPath,
isSaas,

View File

@ -25,6 +25,13 @@ export default {
default: false,
},
},
props: {
staticBreadcrumbs: {
type: Object,
required: false,
default: () => ({ items: [] }),
},
},
computed: {
isWorkItemOnly() {
return this.glFeatures.workItemPlanningView;
@ -62,7 +69,9 @@ export default {
indexCrumb.href = this.listPath;
}
const crumbs = [indexCrumb];
const staticCrumbs = this.staticBreadcrumbs.items;
const crumbs = [...staticCrumbs, indexCrumb];
if (this.$route.name === ROUTES.new) {
crumbs.push({

View File

@ -438,6 +438,11 @@ export default {
const discussionId = this.getDiscussionIdFromSelection();
const text = await CopyAsGFM.selectionToGfm();
// Prevent 'r' being written.
if (e && typeof e.preventDefault === 'function') {
e.preventDefault();
}
// Check if selection is coming from an existing discussion
if (discussionId) {
gfmEventHub.$emit('quote-reply', {

View File

@ -77,7 +77,10 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType, withTabs } = {}
breadcrumbParams.listPath = issuesListPath;
}
injectVueAppBreadcrumbs(router, WorkItemBreadcrumb, apolloProvider, breadcrumbParams);
injectVueAppBreadcrumbs(router, WorkItemBreadcrumb, apolloProvider, breadcrumbParams, {
// Cf. https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186906
singleNavOptIn: true,
});
apolloProvider.clients.defaultClient.cache.writeQuery({
query: activeDiscussionQuery,

View File

@ -291,6 +291,7 @@ module ApplicationSettingsHelper
:default_snippet_visibility,
:default_syntax_highlighting_theme,
:delete_inactive_projects,
:deletion_adjourned_period,
:deny_all_requests_except_allowed,
:disable_admin_oauth_scopes,
:disable_feed_token,

View File

@ -18,8 +18,6 @@ module Ci
belongs_to :project
has_many :components, class_name: 'Ci::Catalog::Resources::Component', foreign_key: :catalog_resource_id,
inverse_of: :catalog_resource
has_many :component_usages, class_name: 'Ci::Catalog::Resources::Components::Usage',
foreign_key: :catalog_resource_id, inverse_of: :catalog_resource
has_many :versions, class_name: 'Ci::Catalog::Resources::Version', foreign_key: :catalog_resource_id,
inverse_of: :catalog_resource
has_many :sync_events, class_name: 'Ci::Catalog::Resources::SyncEvent', foreign_key: :catalog_resource_id,

View File

@ -13,7 +13,6 @@ module Ci
belongs_to :project, inverse_of: :ci_components
belongs_to :catalog_resource, class_name: 'Ci::Catalog::Resource', inverse_of: :components
belongs_to :version, class_name: 'Ci::Catalog::Resources::Version', inverse_of: :components
has_many :usages, class_name: 'Ci::Catalog::Resources::Components::Usage', inverse_of: :component
has_many :last_usages, class_name: 'Ci::Catalog::Resources::Components::LastUsage', inverse_of: :component
# BulkInsertSafe must be included after the `has_many` declaration, otherwise it raises

View File

@ -21,7 +21,6 @@ module Ci
belongs_to :component, class_name: 'Ci::Catalog::Resources::Component', inverse_of: :usages
belongs_to :catalog_resource, class_name: 'Ci::Catalog::Resource', inverse_of: :component_usages
belongs_to :project, inverse_of: :ci_component_usages
validates :component, :catalog_resource, :project, :used_by_project_id, presence: true
validates :used_date, uniqueness: { scope: [:component_id, :used_by_project_id] }

View File

@ -205,9 +205,7 @@ class Project < ApplicationRecord
has_one :catalog_resource, class_name: 'Ci::Catalog::Resource', inverse_of: :project
has_many :ci_components, class_name: 'Ci::Catalog::Resources::Component', inverse_of: :project
# These are usages of the ci_components owned (not used) by the project
has_many :ci_component_last_usages, class_name: 'Ci::Catalog::Resources::Components::LastUsage', inverse_of: :component_project
has_many :ci_component_usages, class_name: 'Ci::Catalog::Resources::Components::Usage', inverse_of: :project
has_many :catalog_resource_versions, class_name: 'Ci::Catalog::Resources::Version', inverse_of: :project
has_many :catalog_resource_sync_events, class_name: 'Ci::Catalog::Resources::SyncEvent', inverse_of: :project

View File

@ -105,7 +105,9 @@ module Users
pipeline_schedules_inputs_adoption_banner: 102,
product_usage_data_collection_changes: 103,
dora_dashboard_migration_group: 104,
dora_dashboard_migration_project: 105
dora_dashboard_migration_project: 105,
enable_duo_banner_admin_duo_settings_page: 106, # EE-only
enable_duo_banner_admin_dashboard: 107 # EE-only
}
validates :feature_name,

View File

@ -12,13 +12,6 @@ module Ci
end
def execute
component_usage = Ci::Catalog::Resources::Components::Usage.new(
component: component,
catalog_resource: component.catalog_resource,
project: component.project,
used_by_project_id: used_by_project.id
)
component_last_usage = Ci::Catalog::Resources::Components::LastUsage.get_usage_for(component, used_by_project)
if component_last_usage.new_record?
@ -27,22 +20,9 @@ module Ci
component_last_usage.touch(:last_used_date)
end
component_last_usage.save # Save last usage regardless of component_usage
component_last_usage.save
if component_usage.save
ServiceResponse.success(message: 'Usage recorded')
else
errors = component_usage.errors || component_last_usage.errors
if errors.size == 1 && errors.first.type == :taken
ServiceResponse.success(message: 'Usage already recorded for today')
else
exception = ValidationError.new(errors.full_messages.join(', '))
Gitlab::ErrorTracking.track_exception(exception)
ServiceResponse.error(message: exception.message)
end
end
ServiceResponse.success(message: 'Usage recorded')
end
private

View File

@ -1,6 +1,9 @@
- breadcrumb_title _("Dashboard")
- page_title _("Dashboard")
= render_if_exists 'admin/enable_duo_banner_sm',
title: s_('AiPowered|AI-native features now available in IDEs'),
callouts_feature_name: 'enable_duo_banner_admin_dashboard'
= render_if_exists 'shared/manual_quarterly_reconciliation_banner'
= render_if_exists 'shared/submit_license_usage_data_banner'
= render_if_exists 'shared/qrtly_reconciliation_alert'

View File

@ -3,14 +3,16 @@
module Ci
module Catalog
module Resources
# This worker can be called multiple times simultaneously but only one can process data at a time.
# This is ensured by an exclusive lease guard in `Gitlab::Ci::Components::Usages::Aggregator`.
# The scheduling frequency should be == `Gitlab::Ci::Components::Usages::Aggregator::MAX_RUNTIME`
# so there is no time gap between job runs.
class AggregateLast30DayUsageWorker
include ApplicationWorker
include CronjobQueue # rubocop: disable Scalability/CronWorkerContext -- Periodic processing is required
MAX_RUNTIME = 4.minutes # Should be >= job scheduling frequency so there is no gap between job runs
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/155001#note_1941066672
# Includes extra time (1.minute) to execute `&usage_counts_block`
WORKER_DEDUP_TTL = MAX_RUNTIME + 1.minute
feature_category :pipeline_composition
data_consistency :sticky
@ -18,7 +20,7 @@ module Ci
idempotent!
deduplicate :until_executed, if_deduplicated: :reschedule_once,
ttl: Gitlab::Ci::Components::Usages::Aggregator::WORKER_DEDUP_TTL
ttl: WORKER_DEDUP_TTL
def perform
response = Ci::Catalog::Resources::AggregateLast30DayUsageService.new.execute

View File

@ -20,7 +20,6 @@ Gitlab::Database::Partitioning.register_models(
Ci::BuildTag,
Ci::BuildTraceMetadata,
Ci::BuildSource,
Ci::Catalog::Resources::Components::Usage,
Ci::Catalog::Resources::SyncEvent,
Ci::FinishedPipelineChSyncEvent,
Ci::JobAnnotation,

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class AddTempIndexOnNotesForProjectsNullAndIdSync < Gitlab::Database::Migration[2.3]
milestone '18.0'
disable_ddl_transaction!
INDEX_NAME = 'tmp_index_null_project_id_on_notes'
# rubocop:disable Migration/PreventIndexCreation -- Already added in a async index creation
# db/post_migrate/20250411043427_add_temp_index_on_notes_for_projects_null_and_id.rb
def up
add_concurrent_index :notes, :id, name: INDEX_NAME, where: 'project_id is NULL'
end
# rubocop:enable Migration/PreventIndexCreation
def down
remove_concurrent_index_by_name :notes, INDEX_NAME
end
end

View File

@ -0,0 +1 @@
b6e5cb2587f50239aaf5e436876ffc77f51ae1e04196d002f0a6e7e0178a4dd2

View File

@ -38389,6 +38389,8 @@ CREATE INDEX tmp_index_for_null_member_namespace_id ON members USING btree (memb
CREATE INDEX tmp_index_for_project_namespace_id_migration_on_routes ON routes USING btree (id) WHERE ((namespace_id IS NULL) AND ((source_type)::text = 'Project'::text));
CREATE INDEX tmp_index_null_project_id_on_notes ON notes USING btree (id) WHERE (project_id IS NULL);
CREATE INDEX tmp_index_pats_on_notification_columns_and_expires_at ON personal_access_tokens USING btree (id) WHERE ((expire_notification_delivered IS TRUE) AND (seven_days_notification_sent_at IS NULL) AND (expires_at IS NOT NULL));
CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statistics USING btree (project_id) WHERE (container_registry_size = 0);

View File

@ -45565,6 +45565,8 @@ Name of the feature that the callout is for.
| <a id="usercalloutfeaturenameenumdora_dashboard_migration_group"></a>`DORA_DASHBOARD_MIGRATION_GROUP` | Callout feature name for dora_dashboard_migration_group. |
| <a id="usercalloutfeaturenameenumdora_dashboard_migration_project"></a>`DORA_DASHBOARD_MIGRATION_PROJECT` | Callout feature name for dora_dashboard_migration_project. |
| <a id="usercalloutfeaturenameenumduo_chat_callout"></a>`DUO_CHAT_CALLOUT` | Callout feature name for duo_chat_callout. |
| <a id="usercalloutfeaturenameenumenable_duo_banner_admin_dashboard"></a>`ENABLE_DUO_BANNER_ADMIN_DASHBOARD` | Callout feature name for enable_duo_banner_admin_dashboard. |
| <a id="usercalloutfeaturenameenumenable_duo_banner_admin_duo_settings_page"></a>`ENABLE_DUO_BANNER_ADMIN_DUO_SETTINGS_PAGE` | Callout feature name for enable_duo_banner_admin_duo_settings_page. |
| <a id="usercalloutfeaturenameenumfeature_flags_new_version"></a>`FEATURE_FLAGS_NEW_VERSION` | Callout feature name for feature_flags_new_version. |
| <a id="usercalloutfeaturenameenumgcp_signup_offer"></a>`GCP_SIGNUP_OFFER` | Callout feature name for gcp_signup_offer. |
| <a id="usercalloutfeaturenameenumgeo_enable_hashed_storage"></a>`GEO_ENABLE_HASHED_STORAGE` | Callout feature name for geo_enable_hashed_storage. |

View File

@ -172,3 +172,24 @@ and replace it internally:
application_setting.update(ci_jwt_signing_key: key)
end
```
### `401: unauthorized` status code
This error indicates that the authentication request failed. When using OpenID Connect (OIDC) authentication from GitLab pipelines to external services, `401 Unauthorized` errors can occur due to several common reasons:
- You used a deprecated token, such as `$CI_JOB_JWT_V2`, instead of [declaring an ID token](../yaml/_index.md#id_tokens). For more information, see [old versions of JSON Web Tokens are deprecated](../../update/deprecations.md#old-versions-of-json-web-tokens-are-deprecated).
- You mismatched `provider_name` values between your `.gitlab-ci.yml` file and the OIDC Identity Provider configuration on the external service.
- You missed or mismatched the `aud` (audience) claim between the ID token issued by GitLab and what the external service expects.
- You did not enable or configure the `id_tokens:` block in the GitLab CI/CD job.
To resolve the error, decode the token inside your job:
```shell
echo $OIDC_TOKEN | cut -d '.' -f2 | base64 -d | jq .
```
Make sure that:
- `aud` (audience) matches the expected audience (for example, the external services URL).
- `sub` (subject) is mapped in the services Identity Provider settings.
- `preferred_username` is not present by default in GitLab ID tokens.

View File

@ -128,5 +128,3 @@ Bronze-level subscribers:
- Fields in the [Merge requests API](../api/merge_requests.md) for [merge request approvals](../user/project/merge_requests/approvals/_index.md)
- Fields in the [Protected branches API](../api/protected_branches.md) that specify users or groups allowed to merge
- [Merge request approvals API](../api/merge_request_approvals.md)
- Development information:
- [Run Jenkins in a macOS development environment](../development/integrations/jenkins.md)

View File

@ -26,12 +26,14 @@ GitLab Duo AI-native features are powered by a generative AI model. The processi
The below reflects the current retention periods of GitLab AI model [Sub-Processors](https://about.gitlab.com/privacy/subprocessors/#third-party-sub-processors):
GitLab has arranged zero-day data retention with Anthropic, Fireworks AI, and Google for GitLab Duo requests. Anthropic, Fireworks AI, and Google discard model input and output data immediately after the output is provided; input and output data is not stored for abuse monitoring. Model input and output is not used to train models.
Excluding [Fireworks AI prompt caching](../project/repository/code_suggestions/_index.md#prompt-caching), GitLab has arranged zero-day data retention with Anthropic, Fireworks AI, and Google for GitLab Duo requests. Anthropic, Fireworks AI (when prompt caching is disabled), and Google discard model input and output data immediately after the output is provided; input and output data is not stored for abuse monitoring. Model input and output is never used to train models.
All of these AI providers are under data protection agreements with GitLab that prohibit the use of Customer Content for their own purposes, except to perform their independent legal obligations.
GitLab Duo Chat retains chat history to help you return quickly to previously discussed topics. You can delete chats in the GitLab Duo Chat interface. GitLab does not otherwise retain input and output data unless customers provide consent through a GitLab [Support Ticket](https://about.gitlab.com/support/portal/). Learn more about [AI feature logging](../../administration/logs/_index.md).
Fireworks AI prompt caching is enabled by default to improve Code Suggestions latency. For more information and how to opt out of prompt caching, see the [Code Suggestions prompt caching documentation](../project/repository/code_suggestions/_index.md#prompt-caching).
## Training data
GitLab does not train generative AI models.

View File

@ -277,10 +277,8 @@ Use the imported files in your IDE to provide context about your code project. I
Because of LLM limits and performance reasons, the content of the currently
opened file is truncated:
- For code completion:
- In GitLab 17.5 and earlier, to 2,048 tokens (roughly 8,192 characters).
- In GitLab 17.6 and later, to 32,000 tokens (roughly 128,000 characters).
- For code generation: to 142,856 tokens (roughly 500,000 characters).
- For code completion: to 32,000 tokens (roughly 128,000 characters).
- For code generation: to 200,000 tokens (roughly 800,000 characters).
Content above the cursor is prioritized over content below the cursor. The content
above the cursor is truncated from the left side, and content below the cursor
@ -330,6 +328,36 @@ To learn about the code that builds the prompt, see these files:
[`ai_gateway/code_suggestions/processing/completions.py`](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/blob/fcb3f485a8f047a86a8166aad81f93b6d82106a7/ai_gateway/code_suggestions/processing/completions.py#L273)
in the `modelops` repository.
## Prompt caching
{{< history >}}
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/535651) in GitLab 18.0.
{{< /history >}}
Prompt caching is enabled by default to improve Code Suggestions latency. When prompt caching is enabled, code completion prompt data is temporarily stored in memory by the model vendor. Prompt caching significantly improves latency by avoiding the re-processing of cached prompt and input data. The cached data is never logged to any persistent storage.
### Disable prompt caching
You can disable prompt caching for top-level groups in the GitLab Duo settings.
On GitLab.com:
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Settings > GitLab Duo**.
1. Select **Change configuration**.
1. Disable the **Prompt caching** toggle.
1. Select **Save changes**.
On GitLab Self-Managed:
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Settings > General**.
1. Expand **GitLab Duo features**.
1. Disable the **Prompt caching** toggle.
1. Select **Save changes**.
## Response time
Code Suggestions is powered by a generative AI model.

View File

@ -1,169 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Components
module Usages
# Component usage is defined as the number of unique `used_by_project_id`s in the table
# `p_catalog_resource_component_usages` for a given scope.
#
# This aggregator is intended to be run in a scheduled cron job. It implements a "continue later"
# mechanism with a Redis cursor, which enables the work to continue from where it was last interrupted
# on each run. It iterates through the target table in batches, in order of ID ascending. For each
# target ID, it collects the usage count using `distinct_each_batch` for the given usage window.
# We collect the count in Rails because the SQL query `COUNT(DISTINCT(*))` is not performant when the
# data volume is large.
#
# RUNTIME: The actual total runtime will be longer than MAX_RUNTIME because
# it depends on the execution time of `&usage_counts_block`.
# EXCLUSIVE LEASE: This aggregator is protected from parallel processing with an exclusive lease guard.
# WORKER: The worker running this service should be scheduled at the same cadence as MAX_RUNTIME, with:
# deduplicate :until_executed, if_deduplicated: :reschedule_once, ttl: WORKER_DEDUP_TTL
# STOPPING: When the aggregator's cursor advances past the max target_id, it resets to 0. This means
# it may reprocess targets that have already been processed for the given usage window.
# To minimize redundant reprocessing, you should prevent the aggregator from running once it
# meets a certain stop condition (e.g. when all targets have been marked as "processed").
#
##### Usage
#
# each_batch:
# - Yields each batch of `usage_counts` to the given block. The block should:
# - Be able to handle targets that might be reprocessed multiple times.
# - Not exceed 1 minute in execution time.
# - `usage_counts` format: { target_object1 => 100, target_object2 => 200, ... }
# - If the lease is obtained, returns a Result containing `total_targets_completed` and
# `cursor_attributes`. Otherwise, returns nil.
#
# Example:
# return if done_processing?
#
# aggregator = Gitlab::Ci::Components::Usages::Aggregator.new(
# target_model: Ci::Catalog::Resource,
# group_by_column: :catalog_resource_id,
# usage_start_date: Date.today - 30.days,
# usage_end_date: Date.today - 1.day,
# lease_key: 'my_aggregator_service_lease_key'
# )
#
# result = aggregator.each_batch do |usage_counts|
# # Bulk update usage counts in the database
# end
#
##### Parameters
#
# target_model: Target model to iterate through. Model class should contain `include EachBatch`.
# group_by_column: This should be the usage table's foreign key of the target_model.
# usage_start_date & usage_end_date: Date objects specifiying the window of usage data to aggregate.
# lease_key: Used for obtaining an exclusive lease. Also used as part of the cursor Redis key.
#
# rubocop: disable CodeReuse/ActiveRecord -- Custom queries required for data processing
class Aggregator
include ExclusiveLeaseGuard
Result = Struct.new(:total_targets_completed, :cursor_attributes, keyword_init: true)
TARGET_BATCH_SIZE = 1000
DISTINCT_USAGE_BATCH_SIZE = 100
MAX_RUNTIME = 4.minutes # Should be >= job scheduling frequency so there is no gap between job runs
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/155001#note_1941066672
# Includes extra time (1.minute) to execute `&usage_counts_block`
WORKER_DEDUP_TTL = MAX_RUNTIME + 1.minute
LEASE_TIMEOUT = 10.minutes
def initialize(target_model:, group_by_column:, usage_start_date:, usage_end_date:, lease_key:)
@target_model = target_model
@group_by_column = group_by_column
@lease_key = lease_key # Used by ExclusiveLeaseGuard
@runtime_limiter = Gitlab::Metrics::RuntimeLimiter.new(MAX_RUNTIME)
@cursor = Aggregators::Cursor.new(
redis_key: "#{lease_key}:cursor",
target_model: target_model,
usage_window: Aggregators::Cursor::Window.new(usage_start_date, usage_end_date)
)
end
def each_batch(&usage_counts_block)
try_obtain_lease do
total_targets_completed = process_targets(&usage_counts_block)
Result.new(total_targets_completed: total_targets_completed, cursor_attributes: cursor.attributes)
end
end
private
attr_reader :target_model, :group_by_column, :cursor, :runtime_limiter
def process_targets
# Restore the scope from cursor so we can resume from the last run. `cursor.target_id` is 0
# when the Redis cursor is first initialized or when it advances past the max target ID.
restored_target_scope = target_model.where('id >= ?', cursor.target_id)
total_targets_completed = 0
restored_target_scope.each_batch(of: TARGET_BATCH_SIZE) do |targets_relation|
usage_counts = aggregate_usage_counts(targets_relation)
yield usage_counts if usage_counts.present?
total_targets_completed += usage_counts.length
break if runtime_limiter.over_time?
end
cursor.advance unless cursor.interrupted?
cursor.save!
total_targets_completed
end
def aggregate_usage_counts(targets_relation)
usage_counts = {}
targets_relation.order(:id).each do |target|
# When target.id is different from the cursor's target_id, it
# resets last_usage_count and last_used_by_project_id to 0.
cursor.target_id = target.id
usage_scope = ::Ci::Catalog::Resources::Components::Usage
.where(group_by_column => cursor.target_id)
.where(used_date: cursor.usage_window.start_date..cursor.usage_window.end_date)
# Restore the scope from cursor so we can resume from the last run if interrupted
restored_usage_scope = usage_scope.where('used_by_project_id > ?', cursor.last_used_by_project_id)
usage_counts[target] = cursor.last_usage_count
restored_usage_scope
.distinct_each_batch(column: :used_by_project_id, of: DISTINCT_USAGE_BATCH_SIZE) do |usages_relation|
count = usages_relation.count
usage_counts[target] += count
# If we're over time and count == batch size, it means there is likely another batch
# to process for the current target, so the usage count is incomplete. We store the
# last used_by_project_id and count so that we can resume counting on the next run.
if runtime_limiter.over_time? && count == DISTINCT_USAGE_BATCH_SIZE
cursor.interrupt!(
last_used_by_project_id: usages_relation.maximum(:used_by_project_id).to_i,
last_usage_count: usage_counts[target]
)
usage_counts.delete(target) # Remove the incomplete count
break
end
end
break if runtime_limiter.over_time?
end
usage_counts
end
def lease_timeout
LEASE_TIMEOUT
end
end
# rubocop: enable CodeReuse/ActiveRecord
end
end
end
end

View File

@ -1,5 +1,5 @@
variables:
DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.126.0'
DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.128.0'
.dast-auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -1,5 +1,5 @@
variables:
AUTO_DEPLOY_IMAGE_VERSION: 'v2.126.0'
AUTO_DEPLOY_IMAGE_VERSION: 'v2.128.0'
.auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -1,5 +1,5 @@
variables:
AUTO_DEPLOY_IMAGE_VERSION: 'v2.126.0'
AUTO_DEPLOY_IMAGE_VERSION: 'v2.128.0'
.auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -46,7 +46,7 @@ module Gitlab
if fragments.any?
"(" + fragments.join(")\n#{operator_keyword_fragment}\n(") + ")"
else
'NULL'
relations.first&.to_sql.presence || 'NULL'
end
end
@ -60,6 +60,8 @@ module Gitlab
attr_reader :relations, :remove_duplicates, :remove_order
def verify_select_values!(relations)
return if relations.empty?
all_select_values = relations.map do |relation|
if relation.respond_to?(:select_values)
relation.select_values

View File

@ -5729,6 +5729,9 @@ msgstr ""
msgid "AiPowered|Automatic Code Reviews"
msgstr ""
msgid "AiPowered|Availability"
msgstr ""
msgid "AiPowered|By turning on these features, you accept the %{linkStart}GitLab Testing Agreement%{linkEnd}."
msgstr ""
@ -5819,6 +5822,12 @@ msgstr ""
msgid "AiPowered|GitLab Duo Core will be available to all users in your %{plan} plan, including Chat and Code Suggestions in supported IDEs. %{eligibilityLinkStart}Eligibility requirements apply%{eligibilityLinkEnd}. By enabling GitLab Duo, you accept the %{aiLinkStart}GitLab AI functionality terms%{aiLinkEnd}."
msgstr ""
msgid "AiPowered|GitLab Duo Enterprise availability"
msgstr ""
msgid "AiPowered|GitLab Duo Pro availability"
msgstr ""
msgid "AiPowered|GitLab Duo Pro or Enterprise"
msgstr ""
@ -6927,6 +6936,9 @@ msgstr ""
msgid "An error occurred while editing lock information, please try again."
msgstr ""
msgid "An error occurred while enabling GitLab Duo Core. Reload the page to try again."
msgstr ""
msgid "An error occurred while enabling Service Desk."
msgstr ""

View File

@ -1,4 +1,4 @@
ARG GDK_SHA=59037d83cf1134ab20ec3965f9c97442d566a35e
ARG GDK_SHA=718ad7b2919de30eeb9fbb09dc0d58ec19d36cad
# Use tag prefix when running on 'stable' branch to make sure 'protected' image is used which is not deleted by registry cleanup
ARG GDK_BASE_TAG_PREFIX

View File

@ -74,13 +74,16 @@ class SetPipelineName
end
def pipeline_tier
return unless ENV['CI_MERGE_REQUEST_LABELS']
return if expedited_pipeline?
tier_label = merge_request_labels.find { |label| label.start_with?('pipeline::tier-') }
return if tier_label.nil?
# the pipeline tier is detected by the `pipeline-tier-<tier>` job name.
pipeline_jobs.each do |job|
next unless job.name.start_with?('pipeline-tier-')
tier_label[/\d+\z/]
return job.name[/\d+\z/]
end
nil
end
def merge_request_labels
@ -96,27 +99,35 @@ class SetPipelineName
end
def expedited_pipeline?
merge_request_labels.any?('pipeline::expedited') ||
# TODO: Remove once the label is renamed to be scoped
merge_request_labels.any?('pipeline:expedite')
merge_request_labels.any?('pipeline::expedited')
end
def pipeline_types
return ['expedited'] if expedited_pipeline?
types = Set.new
api_client.pipeline_bridges(ENV['CI_PROJECT_ID'], ENV['CI_PIPELINE_ID']).auto_paginate do |job|
types.merge(pipeline_types_for(job))
end
api_client.pipeline_jobs(ENV['CI_PROJECT_ID'], ENV['CI_PIPELINE_ID']).auto_paginate do |job|
types = pipeline_jobs.each_with_object(Set.new) do |job, types|
types.merge(pipeline_types_for(job))
end
types.sort_by { |type| PIPELINE_TYPES_ORDERED.index(type) }
end
def pipeline_jobs
@pipeline_jobs ||= []
return @pipeline_jobs if @pipeline_jobs.any?
api_client.pipeline_bridges(ENV['CI_PROJECT_ID'], ENV['CI_PIPELINE_ID']).auto_paginate do |job|
@pipeline_jobs << job
end
api_client.pipeline_jobs(ENV['CI_PROJECT_ID'], ENV['CI_PIPELINE_ID']).auto_paginate do |job|
@pipeline_jobs << job
end
@pipeline_jobs
end
def pipeline_opts
return [] unless ENV['CI_MERGE_REQUEST_LABELS']

View File

@ -276,6 +276,7 @@ RSpec.describe 'Database schema',
members: 21,
merge_requests: 33,
namespaces: 26,
notes: 16,
p_ci_builds: 27,
p_ci_pipelines: 24,
packages_package_files: 16,

View File

@ -3,7 +3,7 @@
require 'spec_helper'
require_relative './shared_context_and_examples'
RSpec.describe 'CI configuration validation - branch pipelines', feature_category: :tooling, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/535543' do
RSpec.describe 'CI configuration validation - branch pipelines', feature_category: :tooling do
include ProjectForksHelper
include CiConfigurationValidationHelper

View File

@ -1,6 +1,7 @@
import { createWrapper } from '@vue/test-utils';
import Vue from 'vue';
import { injectVueAppBreadcrumbs, staticBreadcrumbs } from '~/lib/utils/breadcrumbs';
import { injectVueAppBreadcrumbs } from '~/lib/utils/breadcrumbs';
import { staticBreadcrumbs } from '~/lib/utils/breadcrumbs_state';
import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
import createMockApollo from 'helpers/mock_apollo_helper';
@ -8,6 +9,13 @@ describe('Breadcrumbs utils', () => {
const mockRouter = jest.fn();
const MockComponent = Vue.component('MockComponent', {
props: {
staticBreadcrumbs: {
type: Object,
required: false,
default: () => ({ items: [] }),
},
},
render: (createElement) =>
createElement('span', {
attrs: {
@ -69,5 +77,43 @@ describe('Breadcrumbs utils', () => {
).toHaveLength(1);
});
});
describe('when singleNavOptIn is enabled', () => {
const breadcrumbsHTML = `
<div id="js-vue-page-breadcrumbs-wrapper">
<nav id="js-vue-page-breadcrumbs" class="gl-breadcrumbs"></nav>
<div id="js-injected-page-breadcrumbs"></div>
</div>
`;
beforeEach(() => {
setHTMLFixture(breadcrumbsHTML);
staticBreadcrumbs.items = [
{ text: 'First', href: '/first' },
{ text: 'Last', href: '/last' },
];
});
it('removes the last item from staticBreadcrumbs.items and passes it to the component', () => {
const wrapper = createWrapper(
injectVueAppBreadcrumbs(
mockRouter,
MockComponent,
mockApolloProvider,
{},
{ singleNavOptIn: true },
),
);
expect(staticBreadcrumbs.items).toHaveLength(1);
expect(staticBreadcrumbs.items[0].text).toBe('First');
expect(staticBreadcrumbs.items[0].href).toBe('/first');
const component = wrapper.findComponent(MockComponent);
expect(component.props('staticBreadcrumbs')).toEqual({
items: [{ text: 'First', href: '/first' }],
});
});
});
});
});

View File

@ -16,6 +16,7 @@ describe('WorkItemBreadcrumb', () => {
isGroup = true,
workItemsViewPreference = false,
workItemsAlpha = false,
props = {},
} = {}) => {
wrapper = shallowMount(WorkItemBreadcrumb, {
provide: {
@ -31,6 +32,7 @@ describe('WorkItemBreadcrumb', () => {
mocks: {
$route,
},
propsData: props,
});
};
@ -157,6 +159,21 @@ describe('WorkItemBreadcrumb', () => {
);
});
it('combines static and dynamic breadcrumbs', () => {
createComponent({
$route: { name: 'workItem', params: { iid: '1' }, path: '/1' },
props: {
staticBreadcrumbs: { items: [{ text: 'Static', href: '/static' }] },
},
});
expect(findBreadcrumb().props('items')).toEqual([
{ text: 'Static', href: '/static' },
{ text: 'Work items', to: { name: 'workItemList', query: undefined } },
{ text: '#1', to: '/1' },
]);
});
it('renders work item iid breadcrumb on work item detail page', () => {
createComponent({ $route: { name: 'workItem', params: { iid: '1' }, path: '/1' } });

View File

@ -132,7 +132,7 @@ RSpec.describe Gitlab::Auth::SessionExpireFromInitEnforcer, feature_category: :s
session_expire_from_init: true,
session_expire_delay: 5
)
allow(instance).to receive(:proxy).and_return(devise_proxy)
allow(Devise::Hooks::Proxy).to receive(:new).and_return(devise_proxy)
end
it 'does not throw' do

View File

@ -1,204 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_shared_state, :freeze_time,
feature_category: :pipeline_composition do
let_it_be(:usage_start_date) { Date.today - 30.days }
let_it_be(:usage_end_date) { Date.today - 1.day }
let_it_be(:resources) { create_list(:ci_catalog_resource, 5).sort_by(&:id) }
let_it_be(:expected_usage_counts) { resources.zip([3, 17, 0, 1, 26]).to_h }
let(:usage_model) { Ci::Catalog::Resources::Components::Usage }
let(:target_model) { Ci::Catalog::Resource }
let(:group_by_column) { :catalog_resource_id }
let(:lease_key) { 'my_lease_key' }
let(:usage_window) do
Gitlab::Ci::Components::Usages::Aggregators::Cursor::Window.new(usage_start_date, usage_end_date)
end
before_all do
# Set up each resource with 1-5 versions, 1-5 components per version, and the expected usages per component
expected_usage_counts.each_with_index do |(resource, usage_count), i|
create_list(:ci_catalog_resource_version, i + 1, catalog_resource: resource).each do |version|
(1..i + 1).each do |j|
component = create(:ci_catalog_resource_component, version: version, name: "component#{j}")
(1..usage_count).each do |k|
# Inside the usage window
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date, used_by_project_id: k)
# Outside the usage window
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date - k.days, used_by_project_id: k)
end
end
end
end
end
describe '#each_batch' do
shared_examples 'when the aggregator is not interrupted' do
it 'returns the expected result' do
# We process all catalog resources and advance the cursor
batched_usage_counts, result = run_new_aggregator_each_batch
expect(batched_usage_counts).to eq(expected_batched_usage_counts)
expect(result.total_targets_completed).to eq(target_model.count)
expect(result.cursor_attributes).to eq({
target_id: 0,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0,
max_target_id: target_model.maximum(:id).to_i
})
end
end
shared_examples 'with multiple distinct usage batches' do
before do
stub_const("#{described_class}::DISTINCT_USAGE_BATCH_SIZE", 2)
end
it_behaves_like 'when the aggregator is not interrupted'
context 'when the aggregator is interrupted' do
before do
# Sets the aggregator to break after the first iteration on each run
stub_const("#{described_class}::MAX_RUNTIME", 0)
end
it 'returns the expected result for each run' do
# On 1st run, we get an incomplete usage count for the first catalog resource
batched_usage_counts, result = run_new_aggregator_each_batch
expect(batched_usage_counts).to eq([])
expect(result.total_targets_completed).to eq(0)
expect(result.cursor_attributes).to eq({
target_id: target_model.first.id,
usage_window: usage_window.to_h,
last_used_by_project_id: 2,
last_usage_count: 2,
max_target_id: target_model.maximum(:id).to_i
})
# On 2nd run, we get the complete usage count for the first catalog resource and advance the cursor
batched_usage_counts, result = run_new_aggregator_each_batch
expect(batched_usage_counts).to eq([{ target_model.first => 3 }])
expect(result.total_targets_completed).to eq(1)
expect(result.cursor_attributes).to eq({
target_id: target_model.first.id + 1,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0,
max_target_id: target_model.maximum(:id).to_i
})
all_batched_usage_counts = batched_usage_counts + repeat_new_aggregator_each_batch_until_done
batched_usage_counts_merged = all_batched_usage_counts.flatten.reduce(&:merge)
expect(batched_usage_counts_merged.length).to eq(5)
expect(batched_usage_counts_merged).to eq(expected_usage_counts)
end
context 'when a target is deleted between runs' do
it 'returns the expected result for each run' do
# On 1st run, we get an incomplete usage count for the first catalog resource
batched_usage_counts, result = run_new_aggregator_each_batch
expect(batched_usage_counts).to eq([])
expect(result.total_targets_completed).to eq(0)
expect(result.cursor_attributes).to eq({
target_id: target_model.first.id,
usage_window: usage_window.to_h,
last_used_by_project_id: 2,
last_usage_count: 2,
max_target_id: target_model.maximum(:id).to_i
})
target_model.first.delete
all_batched_usage_counts = repeat_new_aggregator_each_batch_until_done
batched_usage_counts_merged = all_batched_usage_counts.reduce(&:merge)
expect(batched_usage_counts_merged.length).to eq(4)
expect(batched_usage_counts_merged).to eq(expected_usage_counts.except(resources.first))
end
end
context 'when there are no usage records' do
it 'returns the expected result' do
usage_model.delete_all
all_batched_usage_counts = repeat_new_aggregator_each_batch_until_done
batched_usage_counts_merged = all_batched_usage_counts.reduce(&:merge)
expect(batched_usage_counts_merged.length).to eq(5)
expect(batched_usage_counts_merged).to eq(expected_usage_counts.transform_values { 0 })
end
end
end
end
it_behaves_like 'when the aggregator is not interrupted'
it_behaves_like 'with multiple distinct usage batches'
context 'with multiple target batches' do
before do
stub_const("#{described_class}::TARGET_BATCH_SIZE", 3)
end
it_behaves_like 'when the aggregator is not interrupted'
it_behaves_like 'with multiple distinct usage batches'
end
it 'prevents parallel processing with an exclusive lease guard' do
lease = Gitlab::ExclusiveLease.new(lease_key, timeout: 1.minute).tap(&:try_obtain)
result = run_new_aggregator_each_batch.last
expect(result).to be_nil
lease.cancel
end
end
private
def run_new_aggregator_each_batch
aggregator = described_class.new(
target_model: target_model,
group_by_column: group_by_column,
usage_start_date: usage_start_date,
usage_end_date: usage_end_date,
lease_key: lease_key
)
batched_usage_counts = []
result = aggregator.each_batch do |usage_counts|
batched_usage_counts << usage_counts
end
[batched_usage_counts, result]
end
def expected_batched_usage_counts
resources.each_slice(described_class::TARGET_BATCH_SIZE).map do |batch|
expected_usage_counts.slice(*batch)
end
end
def repeat_new_aggregator_each_batch_until_done
all_batched_usage_counts = []
30.times do
batched_usage_counts, result = run_new_aggregator_each_batch
all_batched_usage_counts << batched_usage_counts
break if result.cursor_attributes[:target_id] == 0
end
all_batched_usage_counts.flatten
end
end

View File

@ -40,8 +40,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::ComponentUsage, feature_category: :p
end
it 'creates a component usage record' do
expect { perform }.to change { Ci::Catalog::Resources::Components::Usage.count }.by(1)
.and change { Ci::Catalog::Resources::Components::LastUsage.count }.by(1)
expect { perform }.to change { Ci::Catalog::Resources::Components::LastUsage.count }.by(1)
end
context 'when component usage has already been recorded', :freeze_time do
@ -57,12 +56,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::ComponentUsage, feature_category: :p
used_by_project_id: project.id)
expect(last_usage.last_used_date).to eq(Time.current.to_date)
end
it 'does not create a component usage record' do
step.perform!
expect { perform }.not_to change { Ci::Catalog::Resources::Components::Usage.count }
end
end
end
end

View File

@ -582,7 +582,6 @@ project:
- catalog_resource_sync_events
- catalog_resource_versions
- ci_components
- ci_component_usages
- ci_component_last_usages
- external_status_checks
- base_tags

View File

@ -28,12 +28,6 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
have_many(:components).class_name('Ci::Catalog::Resources::Component').with_foreign_key(:catalog_resource_id))
end
it do
is_expected.to(
have_many(:component_usages).class_name('Ci::Catalog::Resources::Components::Usage')
.with_foreign_key(:catalog_resource_id))
end
it do
is_expected.to(
have_many(:component_last_usages).class_name('Ci::Catalog::Resources::Components::LastUsage')

View File

@ -8,7 +8,6 @@ RSpec.describe Ci::Catalog::Resources::Component, type: :model, feature_category
it { is_expected.to belong_to(:catalog_resource).class_name('Ci::Catalog::Resource') }
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:version).class_name('Ci::Catalog::Resources::Version') }
it { is_expected.to have_many(:usages).class_name('Ci::Catalog::Resources::Components::Usage') }
it { is_expected.to have_many(:last_usages).class_name('Ci::Catalog::Resources::Components::LastUsage') }
it_behaves_like 'a BulkInsertSafe model', described_class do

View File

@ -1,74 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::Catalog::Resources::Components::Usage, type: :model, feature_category: :pipeline_composition do
let_it_be(:component) { create(:ci_catalog_resource_component) }
let(:component_usage) { build(:ci_catalog_resource_component_usage, component: component) }
it { is_expected.to belong_to(:component).class_name('Ci::Catalog::Resources::Component') }
it { is_expected.to belong_to(:catalog_resource).class_name('Ci::Catalog::Resource') }
it { is_expected.to belong_to(:project).class_name('Project') }
describe 'validations' do
it { is_expected.to validate_presence_of(:component) }
it { is_expected.to validate_presence_of(:catalog_resource) }
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:used_by_project_id) }
it do
component_usage.save!
expect(component_usage).to validate_uniqueness_of(:used_date)
.scoped_to([:component_id, :used_by_project_id])
end
end
describe 'callbacks' do
describe 'used date', :freeze_time do
context 'when used date is not provided' do
it 'sets the used date to today' do
component_usage.save!
expect(component_usage.reload.used_date).to eq(Date.today)
end
end
context 'when used date is provided' do
it 'sets the given used date' do
component_usage.used_date = Date.today + 1.day
component_usage.save!
expect(component_usage.reload.used_date).to eq(Date.today + 1.day)
end
end
end
end
describe 'monthly partitioning', :freeze_time do
let(:partition_manager) { Gitlab::Database::Partitioning::PartitionManager.new(described_class) }
it 'drops partitions older than 12 months' do
# We start with the initialized partitions
oldest_partition = described_class.partitioning_strategy.current_partitions.min_by(&:from)
newest_partition = described_class.partitioning_strategy.current_partitions.max_by(&:from)
# We add one usage record into the oldest and newest partitions
create(:ci_catalog_resource_component_usage, component: component, used_date: oldest_partition.from)
create(:ci_catalog_resource_component_usage, component: component, used_date: newest_partition.from)
expect(described_class.count).to eq(2)
# After traveling forward 12 months from the oldest partition month
travel_to(oldest_partition.to + 12.months + 1.day)
# the oldest partition is dropped
partition_manager.sync_partitions
expect(described_class.partitioning_strategy.current_partitions.include?(oldest_partition)).to eq(false)
# and we only have the usage record from the remaining partitions
expect(described_class.count).to eq(1)
end
end
end

View File

@ -58,7 +58,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to have_one(:slack_integration) }
it { is_expected.to have_one(:catalog_resource) }
it { is_expected.to have_many(:ci_components).class_name('Ci::Catalog::Resources::Component') }
it { is_expected.to have_many(:ci_component_usages).class_name('Ci::Catalog::Resources::Components::Usage') }
it { is_expected.to have_many(:ci_component_last_usages).class_name('Ci::Catalog::Resources::Components::LastUsage').inverse_of(:component_project) }
it { is_expected.to have_many(:catalog_resource_versions).class_name('Ci::Catalog::Resources::Version') }
it { is_expected.to have_many(:catalog_resource_sync_events).class_name('Ci::Catalog::Resources::SyncEvent') }

View File

@ -38,6 +38,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['default_project_visibility']).to be_a String
expect(json_response['default_snippet_visibility']).to be_a String
expect(json_response['default_group_visibility']).to be_a String
expect(json_response['deletion_adjourned_period']).to be_a(Integer)
expect(json_response['rsa_key_restriction']).to eq(0)
expect(json_response['dsa_key_restriction']).to eq(0)
expect(json_response['ecdsa_key_restriction']).to eq(0)

View File

@ -13,7 +13,7 @@ RSpec.describe SetPipelineName, feature_category: :tooling do
let(:project_id) { '123' }
let(:merge_request_iid) { '1234' }
let(:pipeline_id) { '5678' }
let(:merge_request_labels) { ['Engineering Productivity', 'type::feature', 'pipeline::tier-3'] }
let(:merge_request_labels) { ['Engineering Productivity', 'type::feature'] }
let(:put_url) { "https://gitlab.test/api/v4/projects/#{project_id}/pipelines/#{pipeline_id}/metadata" }
@ -81,12 +81,20 @@ RSpec.describe SetPipelineName, feature_category: :tooling do
end
context 'when the pipeline is from a merge request' do
it 'adds a pipeline tier' do
instance.execute
shared_examples 'tiered pipeline' do |tier|
let(:jobs) { ['docs-lint markdown', "pipeline-tier-#{tier}"] }
expect(WebMock).to have_requested(:put, put_url).with { |req| req.body.include?('tier:3') }
it "adds tier:#{tier}" do
instance.execute
expect(WebMock).to have_requested(:put, put_url).with { |req| req.body.include?("tier:#{tier}") }
end
end
it_behaves_like 'tiered pipeline', 1
it_behaves_like 'tiered pipeline', 2
it_behaves_like 'tiered pipeline', 3
it 'adds the pipeline types' do
instance.execute
@ -96,7 +104,7 @@ RSpec.describe SetPipelineName, feature_category: :tooling do
}
end
context 'when the merge request does not have a pipeline tier label' do
context 'when the merge request does not have a pipeline tier' do
let(:merge_request_labels) { ['Engineering Productivity', 'type::feature'] }
it 'adds the N/A pipeline tier' do

View File

@ -25,35 +25,6 @@ RSpec.describe Ci::Catalog::Resources::AggregateLast30DayUsageService, :clean_gi
let(:lease_key) { described_class.name }
let(:service) { described_class.new }
before_all do
# Set up each resource with 1-4 versions, 1-4 components per version, and the expected usages per component
expected_ordered_usage_counts.each_with_index do |usage_count, i|
resource = resources[i]
create_list(:ci_catalog_resource_version, i + 1, catalog_resource: resource).each do |version|
(1..i + 1).each do |j|
component = create(:ci_catalog_resource_component, version: version, name: "component#{j}")
(1..usage_count).each do |mock_used_by_project_id|
# Inside the usage window
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date, used_by_project_id: mock_used_by_project_id)
# Outside the usage window
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date - mock_used_by_project_id.days,
used_by_project_id: mock_used_by_project_id)
# create new usage records in the window
create(:catalog_resource_component_last_usage, component: component, last_used_date: usage_start_date,
used_by_project_id: mock_used_by_project_id)
end
end
end
end
Ci::Catalog::Resource.update_all(last_30_day_usage_count_updated_at: initial_usage_count_updated_at)
end
context 'when storing usage data in catalog_resource_component_last_usages' do
describe '#execute' do
it 'updates component usage counts' do

View File

@ -12,18 +12,13 @@ RSpec.describe Ci::Components::Usages::CreateService, feature_category: :pipelin
subject(:execute) { service.execute }
it 'creates a usage record and updates last_usage', :aggregate_failures do
expect { execute }.to change { Ci::Catalog::Resources::Components::Usage.count }.by(1)
.and change { Ci::Catalog::Resources::Components::LastUsage.count }.by(1)
expect { execute }.to change { Ci::Catalog::Resources::Components::LastUsage.count }.by(1)
expect(execute).to be_success
expect(execute.message).to eq('Usage recorded')
usage = Ci::Catalog::Resources::Components::Usage.find_by(component: component)
last_usage = Ci::Catalog::Resources::Components::LastUsage.find_by(component: component,
used_by_project_id: project.id)
expect(usage.catalog_resource).to eq(component.catalog_resource)
expect(usage.project).to eq(component.project)
expect(usage.used_by_project_id).to eq(project.id)
expect(last_usage.last_used_date).to be_present
end
@ -40,34 +35,6 @@ RSpec.describe Ci::Components::Usages::CreateService, feature_category: :pipelin
used_by_project_id: project.id)
expect(last_usage.last_used_date).to eq(Time.current.to_date)
end
it 'does not create a usage record' do
service.execute
expect { execute }.not_to change { Ci::Catalog::Resources::Components::Usage.count }
expect(execute).to be_success
expect(execute.message).to eq('Usage already recorded for today')
end
end
context 'when usage is invalid' do
before do
usage = instance_double(
Ci::Catalog::Resources::Components::Usage, save: false,
errors: instance_double(ActiveModel::Errors, full_messages: ['msg 1', 'msg 2'], size: 2))
allow(Ci::Catalog::Resources::Components::Usage).to receive(:new).and_return(usage)
end
it 'does not create a usage record' do
expect { execute }.not_to change { Ci::Catalog::Resources::Components::Usage.count }
end
it 'tracks exception and returns error response' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).once
expect(execute).to be_error
expect(execute.message).to eq('msg 1, msg 2')
end
end
end
end

View File

@ -58,20 +58,22 @@ RSpec.describe Projects::CreateFromTemplateService, feature_category: :groups_an
end
context 'the result project' do
let(:project) { subject.execute }
before do
perform_enqueued_jobs do
@project = subject.execute
project
end
@project.reload
project.reload
end
it 'overrides template description' do
expect(@project.description).to match('project description')
expect(project.description).to match('project description')
end
it 'overrides template visibility_level' do
expect(@project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
end
end
end

View File

@ -36,10 +36,16 @@ RSpec.shared_examples 'SQL set operator' do |operator_keyword|
expect(set_operator.to_sql).to include("#{operator_keyword} ALL")
end
it 'returns `NULL` if all relations are empty' do
it 'returns the first empty relation if all relations are empty' do
empty_relation = User.none
set_operator = described_class.new([empty_relation, empty_relation])
expect(set_operator.to_sql).to eq(empty_relation.to_sql)
end
it 'returns `NULL` if there are no relations' do
set_operator = described_class.new([])
expect(set_operator.to_sql).to eq('NULL')
end
end

View File

@ -11,7 +11,8 @@ RSpec.describe Ci::Catalog::Resources::AggregateLast30DayUsageWorker, feature_ca
it 'has the option to reschedule once if deduplicated and a TTL' do
expect(described_class.get_deduplication_options).to include(
{ if_deduplicated: :reschedule_once, ttl: Gitlab::Ci::Components::Usages::Aggregator::WORKER_DEDUP_TTL })
{ if_deduplicated: :reschedule_once,
ttl: Ci::Catalog::Resources::AggregateLast30DayUsageWorker::WORKER_DEDUP_TTL })
end
describe '#perform' do

View File

@ -17,9 +17,9 @@ module Tooling
PATTERNS_DIR = File.expand_path('patterns', __dir__)
def initialize
@patterns = load_yaml_file('single_line_patterns.yml')
@multiline_patterns = load_yaml_file('multiline_patterns.yml')
@catchall_patterns = load_yaml_file('catchall_patterns.yml')
@patterns = load_patterns_from_file('single_line_patterns.yml')
@multiline_patterns = load_patterns_from_file('multiline_patterns.yml')
@catchall_patterns = load_patterns_from_file('catchall_patterns.yml')
end
def process(job_trace)
@ -55,8 +55,19 @@ module Tooling
attr_accessor :patterns, :multiline_patterns, :catchall_patterns
def load_yaml_file(filename)
YAML.safe_load_file(File.join(PATTERNS_DIR, filename), symbolize_names: true)
def load_patterns_from_file(filename)
yaml_data = YAML.safe_load_file(File.join(PATTERNS_DIR, filename), permitted_classes: [Symbol])
result = []
categories = yaml_data.each_value.first
categories.each do |category_name, category_data|
category_data['patterns'].each do |pattern|
result << { pattern: pattern, failure_category: category_name.to_s }
end
end
result
end
end
end

View File

@@ -1,114 +1,202 @@
# catchall_patterns.yml
# Ruby
- pattern: "Control frame information"
failure_category: "ruby_crash_core_dump"
- pattern: "ruby: YJIT has panicked"
failure_category: "ruby_yjit_panick"
- pattern: "OpenSSL::SSL::SSLError"
failure_category: "ruby_openssl"
- pattern: "LoadError:"
failure_category: "ruby_could_not_load_file"
- pattern: "cannot load such file"
failure_category: "ruby_could_not_load_file"
- pattern: "undefined local variable or method `"
failure_category: "ruby_undefined_method_or_variable"
- pattern: "undefined method `"
failure_category: "ruby_undefined_method_or_variable"
- pattern: "FrozenError:"
failure_category: "ruby_frozen"
- pattern: "wrong argument type.+expected.+"
failure_category: "ruby_wrong_argument_type"
- pattern: "uninitialized constant "
failure_category: "ruby_uninitialized_constant"
- pattern: "GitlabSettings::MissingSetting"
failure_category: "ruby_gitlab_settings_missing_setting"
- pattern: "syntax error, unexpected"
failure_category: "ruby_syntax"
- pattern: "SyntaxError"
failure_category: "ruby_syntax"
- pattern: "EOFError"
failure_category: "ruby_eof"
- pattern: "TypeError:"
failure_category: "ruby_type"
- pattern: "RuntimeError"
failure_category: "ruby_runtime_exception"
- pattern: "unknown keyword:"
failure_category: "ruby_unknown_keyword"
- pattern: "wrong number of arguments \\(given"
failure_category: "ruby_wrong_number_of_arguments"
- pattern: "bundler: failed to load command: "
failure_category: "ruby_bundler_command_failed"
- pattern: "Failure/Error:"
failure_category: "ruby_generic_failure"
- pattern: ":in `"
failure_category: "ruby_generic_failure"
# Important: The order of failure categories matters here. It's going to match from top to bottom.
failure_categories:
ruby_crash_core_dump:
description: "Ruby interpreter crashes with a core dump, often showing stack traces with 'Control frame information'. These indicate severe runtime errors like memory corruption or bugs in C extensions that cause the Ruby VM to terminate unexpectedly."
patterns:
- "Control frame information"
# Timeouts
- pattern: "execution took longer than 1h30m0s seconds"
failure_category: "job_timeouts"
- pattern: "Rspec suite is exceeding the 80 minute limit and is forced to exit with error"
failure_category: "rspec_at_80_min"
ruby_yjit_panick:
description: "Panic errors in Ruby's YJIT (Yet Another Just-In-Time) compiler, which accelerates Ruby code execution. YJIT panics typically indicate internal compiler bugs or memory-related issues in the JIT implementation."
patterns:
- "ruby: YJIT has panicked"
# Makefile
- pattern: "make: .+ Error 1"
failure_category: "makefile"
ruby_openssl:
description: "OpenSSL-related errors in Ruby, typically involving SSL certificate validation failures, connection issues, or encryption/decryption problems. Often seen during HTTPS connections to external services."
patterns:
- "OpenSSL::SSL::SSLError"
# Shell
- pattern: "unbound variable"
failure_category: "shell_unbound_variable"
- pattern: ": syntax error"
failure_category: "shell_syntax"
- pattern: ": Permission denied"
failure_category: "shell_permission"
- pattern: ": No such file or directory"
failure_category: "shell_file_not_found"
- pattern: ": command not found"
failure_category: "shell_command_not_found"
- pattern: ": not in a function"
failure_category: "shell_not_in_function"
- pattern: "readonly variable"
failure_category: "shell_readonly_variable"
- pattern: "gzip: stdin: unexpected end of file"
failure_category: "shell_could_not_gzip"
ruby_could_not_load_file:
description: "Ruby cannot load required files or libraries, which may be due to missing gems, incorrect load paths, or dependency issues. These errors prevent code from being properly loaded and executed."
patterns:
- "LoadError:"
- "cannot load such file"
# Infrastructure
- pattern: "ERROR: .+ IO ERROR"
failure_category: "io"
- pattern: "curl.+The requested URL returned error"
failure_category: "could_not_curl"
- pattern: "Connection reset by peer - SSL_connect"
failure_category: "ssl_connect_reset_by_peer"
- pattern: "Server responded with code"
failure_category: "http"
- pattern: "400 Bad Request"
failure_category: "http"
- pattern: "503 Service Unavailable"
failure_category: "http"
- pattern: "Net::HTTPClientException"
failure_category: "http"
- pattern: "Net::HTTPFatalError"
failure_category: "http"
- pattern: "Error: Failed to open TCP connection to "
failure_category: "failed_to_open_tcp_connection"
- pattern: "fatal: Authentication failed for"
failure_category: "authentication_failures"
- pattern: "HTTP Basic: Access denied"
failure_category: "authentication_failures"
ruby_undefined_method_or_variable:
description: "References to undefined local variables or methods in Ruby code, typically caused by typos, missing method definitions, or scope issues. These errors occur when code tries to access variables or call methods that don't exist."
patterns:
- "undefined local variable or method `"
- "undefined method `"
# Job canceled
- pattern: "ERROR: Job failed: canceled"
failure_category: "unknown_failure_canceled"
ruby_frozen:
description: "Attempts to modify frozen (immutable) objects in Ruby, such as strings, arrays, or hashes that have been marked as read-only. Occurs when code tries to alter objects that have been frozen with the freeze method."
patterns:
- "FrozenError:"
# E2E
- pattern: "failed to load command: bin/qa"
failure_category: "e2e_specs"
- pattern: "failed to load command: gitlab-qa"
failure_category: "e2e_specs"
- pattern: "QA::Tools::ReadinessCheck::ReadinessCheckError"
failure_category: "e2e_specs"
- pattern: "Testcase link violations detected"
failure_category: "e2e_lint"
ruby_wrong_argument_type:
description: "Type mismatch errors where methods receive arguments of the wrong type. These occur when a method expects one type of object (like a Module) but receives another (like a Class)."
patterns:
- "wrong argument type.+expected.+"
# Other
- pattern: "An unexpected error occurred"
failure_category: "unexpected"
ruby_uninitialized_constant:
description: "References to Ruby constants (classes or modules) that haven't been defined or properly loaded. Often occurs due to missing requires, autoloading issues, or namespace problems."
patterns:
- "uninitialized constant "
ruby_gitlab_settings_missing_setting:
description: "Missing configuration settings in GitLab's settings framework, occurring when code tries to access configuration options that haven't been defined. Usually requires updating configuration files or adding missing settings."
patterns:
- "GitlabSettings::MissingSetting"
ruby_syntax:
description: "Ruby syntax errors, including unexpected tokens, missing keywords, or malformed code structures. These prevent code from being parsed and must be fixed before execution."
patterns:
- "syntax error, unexpected"
- "SyntaxError"
ruby_eof:
description: "Unexpected end-of-file errors in Ruby, typically occurring when reading from streams or files that unexpectedly terminate. Often seen in network operations or file parsing."
patterns:
- "EOFError"
ruby_type:
description: "Ruby type errors where operations are attempted on incompatible types, such as treating a non-module as a module or attempting operations not supported by a particular object type."
patterns:
- "TypeError:"
ruby_runtime_exception:
description: "Generic runtime exceptions in Ruby code, representing a wide range of operational errors that occur during program execution rather than at parse time."
patterns:
- "RuntimeError"
ruby_unknown_keyword:
description: "Method calls with unknown keyword arguments, usually due to API changes, typos in keyword names, or version mismatches between libraries."
patterns:
- "unknown keyword:"
ruby_wrong_number_of_arguments:
description: "Method calls with an incorrect number of arguments, either too few or too many. Usually caused by API changes or misunderstanding of method signatures."
patterns:
- "wrong number of arguments \\(given"
ruby_bundler_command_failed:
description: "Failures when Bundler attempts to load and execute Ruby commands, often due to dependency issues, environment problems, or errors in the executed command itself."
patterns:
- "bundler: failed to load command: "
ruby_generic_failure:
description: "Generic Ruby errors that don't match more specific categories, typically shown in RSpec test failures or stack traces. Used as a fallback for Ruby errors not captured by other patterns."
patterns:
- "Failure/Error:"
- ":in `"
job_timeouts:
description: "CI/CD job execution timeouts, occurring when jobs run longer than their configured time limits (often 90 minutes for GitLab CI). These may indicate infinite loops, performance issues, or jobs that simply need more time."
patterns:
- "execution took longer than 1h30m0s seconds"
rspec_at_80_min:
description: "RSpec test suite timeouts at the 80-minute mark, a specific limit set for GitLab's test suite to prevent excessively long-running tests. Test suites exceeding this limit are forcibly terminated."
patterns:
- "Rspec suite is exceeding the 80 minute limit and is forced to exit with error"
makefile:
description: "Failures in Makefile-based build processes, often occurring during compilation of C/C++ code, GitLab components like Gitaly, or when running make-based commands. The Error 1 indicates a non-zero exit status from a command."
patterns:
- "make: .+ Error 1"
shell_unbound_variable:
description: "References to undefined shell variables in bash scripts, occurring when scripts attempt to use variables that haven't been set or have gone out of scope."
patterns:
- "unbound variable"
shell_syntax:
description: "Shell script syntax errors, including malformed commands, missing quotes, incorrect control structures, or other bash syntax issues that prevent script execution."
patterns:
- ": syntax error"
shell_permission:
description: "Permission denied errors in shell commands, typically due to insufficient file access rights, attempting to write to read-only locations, or execute files without execute permissions."
patterns:
- ": Permission denied"
shell_file_not_found:
description: "Attempts to access files or directories that don't exist in shell commands, often due to incorrect paths, missing files, or failed file generation steps."
patterns:
- ": No such file or directory"
shell_command_not_found:
description: "References to commands that don't exist or aren't in the system PATH, typically due to missing dependencies, uninstalled tools, or typos in command names."
patterns:
- ": command not found"
shell_not_in_function:
description: "Shell script errors related to function context, typically when using function-specific commands like 'return' outside of a function definition."
patterns:
- ": not in a function"
shell_readonly_variable:
description: "Attempts to modify read-only shell variables, which are protected from changes. Often seen with environment variables or constants that shouldn't be altered during execution."
patterns:
- "readonly variable"
shell_could_not_gzip:
description: "Failures when attempting to compress files with gzip, particularly when the input stream ends unexpectedly. May indicate truncated files or interrupted streams."
patterns:
- "gzip: stdin: unexpected end of file"
io:
description: "Input/Output errors during file operations, network transfers, or device interactions. These indicate low-level problems with reading from or writing to resources."
patterns:
- "ERROR: .+ IO ERROR"
could_not_curl:
description: "Failures when using curl to make HTTP requests, typically due to network issues, invalid URLs, or server errors. Often includes HTTP error codes that provide more specific information."
patterns:
- "curl.+The requested URL returned error"
ssl_connect_reset_by_peer:
description: "SSL connection reset errors during secure communications, often due to network interruptions, server-side SSL configuration issues, or certificate problems."
patterns:
- "Connection reset by peer - SSL_connect"
http:
description: "HTTP-related errors when making web requests, including client errors (4xx), server errors (5xx), and exceptions in HTTP client libraries. These indicate problems with API interactions or web service communications."
patterns:
- "Server responded with code"
- "400 Bad Request"
- "503 Service Unavailable"
- "Net::HTTPClientException"
- "Net::HTTPFatalError"
failed_to_open_tcp_connection:
description: "Failures to establish TCP network connections, typically due to network issues, firewalls, incorrect hostnames/IPs, or services not running on the expected ports."
patterns:
- "Error: Failed to open TCP connection to "
authentication_failures:
description: "Authentication failures when accessing protected resources, including Git repositories, Docker registries, or API endpoints. Usually due to invalid credentials, expired tokens, or insufficient permissions."
patterns:
- "fatal: Authentication failed for"
- "HTTP Basic: Access denied"
unknown_failure_canceled:
description: "Job cancellations with unclear causes, possibly due to manual cancellation, GitLab Runner interruptions, or system-level issues. These jobs are terminated before normal completion."
patterns:
- "ERROR: Job failed: canceled"
e2e_specs:
description: "End-to-end test failures specific to GitLab's QA framework, including failures to load the QA tools, failed readiness checks, or other issues with the end-to-end testing infrastructure."
patterns:
- "failed to load command: bin/qa"
- "failed to load command: gitlab-qa"
- "QA::Tools::ReadinessCheck::ReadinessCheckError"
e2e_lint:
description: "Linting issues in end-to-end tests, particularly related to testcase linking conventions that ensure proper documentation and traceability for tests."
patterns:
- "Testcase link violations detected"
unexpected:
description: "Generic unexpected errors that don't match other categories, serving as a catch-all for miscellaneous issues. These often require manual investigation to determine the root cause."
patterns:
- "An unexpected error occurred"

View File

@@ -1,15 +1,45 @@
# multiline_patterns.yml
- pattern: "Failed examples:,expected\\(:| # | \\[\\)"
failure_category: "rspec_valid_rspec_errors_or_flaky_tests"
# Important: These patterns match across multiple lines in the job logs.
#
# Each pattern uses a comma to separate the different parts that must appear in the log, but not necessarily on the same line.
failure_categories:
rspec_valid_rspec_errors_or_flaky_tests:
description: |
Legitimate RSpec test failures that indicate actual code issues or flaky tests, as opposed to infrastructure problems.
- pattern: "Failed examples:,Failure/Error:"
failure_category: "rspec_valid_rspec_errors_or_flaky_tests"
These include expectation failures (expected vs. got), assertion failures, and other test-specific errors. The patterns
match logs that contain both the 'Failed examples:' section and either expectation outputs or general failure messages.
- pattern: "Ran all test suites,Command failed with exit code 1"
failure_category: "jest"
When you see this failure category, it likely means there's an actual issue with the code or tests that needs to be
addressed, rather than a CI infrastructure problem.
patterns:
- "Failed examples:,expected\\(:| # | \\[\\)"
- "Failed examples:,Failure/Error:"
- pattern: "Ran all test suites,exited with status 1"
failure_category: "jest"
jest:
description: |
JavaScript test failures in Jest framework, which is used for testing GitLab's frontend components.
- pattern: "DANGER_GITLAB_API_TOKEN,Errors:"
failure_category: "danger"
These failures indicate issues in the JavaScript/Vue code rather than infrastructure problems. The patterns match
logs showing Jest test suite completion followed by a non-zero exit code, typically from assertion failures,
timeouts, or errors in the test code itself.
patterns:
- "Ran all test suites,Command failed with exit code 1"
- "Ran all test suites,exited with status 1"
danger:
description: |
Failures in the Danger code review tool, which automatically checks merge requests for common issues.
These failures typically indicate that the MR doesn't meet GitLab's contribution guidelines in some way. The pattern
matches logs containing both authentication token references and error outputs from Danger's validation checks.
Common Danger failures include:
- Commit message formatting issues
- Missing documentation updates
- Missing changelog entries
- Large merge requests that should be split
- Missing test coverage
Danger failures should be addressed before merging as they help maintain code quality standards.
patterns:
- "DANGER_GITLAB_API_TOKEN,Errors:"

View File

@@ -1,289 +1,363 @@
# patterns.yml
- pattern: "If needed, you can retry the.+`danger-review` job"
failure_category: "danger"
# Database - DB migrations
- pattern: "Error: rollback of added migrations does not revert db/structure.sql to previous state, please investigate"
failure_category: "db_migrations"
- pattern: "the committed db/structure.sql does not match the one generated by running added migrations"
failure_category: "db_migrations"
- pattern: "the committed files in db/schema_migrations do not match those expected by the added migrations"
failure_category: "db_migrations"
- pattern: "You have.+pending migrations"
failure_category: "db_migrations"
- pattern: "Column operations, like dropping, renaming or primary key conversion"
failure_category: "db_migrations"
- pattern: "createdb: error:"
failure_category: "db_migrations"
- pattern: "Batched migration should be finalized only after at-least one required stop from queuing it"
failure_category: "db_migrations"
- pattern: "Table.+is write protected within this Gitlab database"
failure_category: "db_table_write_protected"
- pattern: "Unsupported cross-join across"
failure_category: "db_table_write_protected"
- pattern: "Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection::CrossSchemaAccessError"
failure_category: "db_cross_schema_access"
- pattern: "raise_database_connection_made_error"
failure_category: "db_connection_in_rails_initializer"
- pattern: "Any new or deleted tables must be added to the database dictionary"
failure_category: "rails_pg_not_in_database_dictionary"
- pattern: "has no foreign key for"
failure_category: "rails_pg_no_foreign_key"
- pattern: "PG::ActiveSqlTransaction"
failure_category: "rails_pg_active_sql_transaction"
- pattern: "PG::CheckViolation"
failure_category: "rails_pg_check_violation"
- pattern: "PG::DependentObjectsStillExist"
failure_category: "rails_pg_dependent_objects_still_exist"
- pattern: "PG::DuplicateAlias"
failure_category: "rails_pg_duplicate_alias"
- pattern: "PG::DuplicateTable"
failure_category: "rails_pg_duplicate_table"
- pattern: "PG::InvalidColumnReference"
failure_category: "rails_pg_invalid_column_reference"
- pattern: "PG::UndefinedColumn"
failure_category: "rails_pg_undefined_column"
- pattern: "PG::UndefinedTable"
failure_category: "rails_pg_undefined_table"
- pattern: "Gitlab::SidekiqSharding::Validator::UnroutedSidekiqApiError"
failure_category: "rails_pg_sidekiq"
- pattern: "psql:.+ERROR:"
failure_category: "psql_failed_command"
- pattern: "Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLAccessDeniedError:"
failure_category: "rake_unallowed_schemas_accessed"
- pattern: "Sidekiq::Job::EnqueueFromTransactionError"
failure_category: "rake_enqueue_from_transaction"
- pattern: "Gitlab::Database::GitlabSchema::UnknownSchemaError"
failure_category: "rake_db_unknown_schema"
- pattern: "ActiveRecord::UnknownPrimaryKey"
failure_category: "rake_rails_unknown_primary_key"
# Catch-alls for database-related errors
- pattern: "An error has occurred, this and all later migrations canceled"
failure_category: "error_in_db_migration"
- pattern: "An error has occurred, all later migrations canceled"
failure_category: "error_in_db_migration"
- pattern: "ActiveRecord::StatementInvalid"
failure_category: "rails_invalid_sql_statement"
# Linting
- pattern: "needs to be regenerated, please run:"
failure_category: "graphql_lint"
- pattern: "GraphQL quer.+out of.+failed validation:"
failure_category: "graphql_lint"
- pattern: "node scripts/frontend/eslint.js . --format gitlab"
failure_category: "eslint"
- pattern: "Running ESLint with the following rules enabled"
failure_category: "eslint"
- pattern: "ERROR: lint test\\(s\\) failed.+Review the log carefully to see full listing"
failure_category: "docs_lint_failed"
- pattern: "files inspected,.+lints? detected"
failure_category: "docs_lint_failed"
- pattern: "Issues found in .+input.+Find details below."
failure_category: "docs_lint_failed"
- pattern: "scripts/lint-docs-redirects.rb"
failure_category: "docs_lint_failed"
- pattern: "git diff --exit-code db/docs"
failure_category: "docs_lint_failed"
- pattern: "documentation is outdated.+Please update it by running"
failure_category: "docs_outdated"
- pattern: "scripts/cells/ci-ensure-application-settings-have-definition-file.rb"
failure_category: "cells_lint"
- pattern: "blocking Pajamas violation\\(s\\) found."
failure_category: "pajamas_violations"
- pattern: "Merge request scan exit status: 2"
failure_category: "pajamas_violations"
- pattern: "yamllint "
failure_category: "yaml_lint_failed"
- pattern: "Not all PO-files are valid"
failure_category: "rake_some_po_files_invalid"
- pattern: "Changes in translated strings found, please update file"
failure_category: "rake_outdated_translated_strings"
- pattern: "ERROR: Deprecations documentation is outdated"
failure_category: "docs_deprecations_outdated"
# Dependencies (Bundler, Yarn, ...)
- pattern: "Found problems with the lockfile"
failure_category: "frontend_lockfile"
- pattern: "Your lockfile needs to be updated, but yarn was run with"
failure_category: "frontend_lockfile"
- pattern: "Peer dependency violation"
failure_category: "yarn_dependency_violation"
- pattern: "yarn run.+failed with the following error"
failure_category: "yarn_run"
- pattern: "changed, but the lockfile can't be updated"
failure_category: "gemfile_issues"
- pattern: "Your lockfile does not satisfy dependencies of"
failure_category: "gemfile_issues"
- pattern: "contains outdated dependencies"
failure_category: "gemfile_issues"
- pattern: "You have already activated"
failure_category: "gemfile_issues"
- pattern: "but your Gemfile requires"
failure_category: "gemfile_issues"
- pattern: "\\(r-\\)generate Gemfile.checksum with"
failure_category: "gemfile_issues"
- pattern: "Bundler cannot continue installing"
failure_category: "gemfile_issues"
- pattern: "Cached checksum for .+ not found"
failure_category: "gemfile_issues"
- pattern: "Bundler::GemNotFound"
failure_category: "gems_not_found"
- pattern: "Gem::Ext::BuildError: ERROR: Failed to build gem native extension."
failure_category: "gems_build"
- pattern: "ERROR: Checksum mismatch for `bao-linux-amd64`"
failure_category: "bao_linux_checksum_mismatch"
- pattern: "running /usr/local/bin/pipenv sync .+: exit status 1"
failure_category: "gemnasium-python-dependency_scanning"
- pattern: "\\[gemnasium-python\\] .+ pipenv sync failed"
failure_category: "gemnasium-python-dependency_scanning"
- pattern: "\\[FATA\\] \\[dependency-scanning\\].+ permission denied"
failure_category: "dependency-scanning_permission_denied"
- pattern: "Error calling /monitor/project/"
failure_category: "package_hunter"
# Git
- pattern: "cloning repository: exit status 128"
failure_category: "git_issues"
- pattern: "did not match any file\\(s\\) known to git"
failure_category: "git_issues"
- pattern: "failed to push some refs to 'https://gitlab.com/gitlab-org/gitlab-foss.git'"
failure_category: "as_if_foss_git_push_issues"
- pattern: "fatal: couldn't find remote ref"
failure_category: "git_issues"
- pattern: "fatal: expected flush after ref listing"
failure_category: "git_issues"
- pattern: "fatal: fetch-pack: invalid index-pack output"
failure_category: "git_issues"
- pattern: "fatal: Not a valid object name"
failure_category: "git_issues"
- pattern: "fatal: protocol error: bad pack header"
failure_category: "git_issues"
- pattern: "fatal: the remote end hung up unexpectedly"
failure_category: "git_issues"
- pattern: "TimeoutExpired: Command '\\['git', 'fetch'"
failure_category: "git_issues"
# Rubocop
- pattern: "offenses? detected"
failure_category: "rubocop"
- pattern: "=== Filtered warnings ==="
failure_category: "rubocop"
# Jest
- pattern: "Command .+ node_modules/.bin/jest.+ exited with status 1"
failure_category: "jest"
# Undercover
- pattern: "some methods have no test coverage!"
failure_category: "rspec_undercoverage"
# Gitaly
- pattern: "gitaly spawn failed"
failure_category: "gitaly_spawn_failed"
# Apollo
- pattern: "Loading Apollo Project"
failure_category: "apollo"
# RSpec tests that already failed on default branch
- pattern: "ERROR: Job failed: exit code 112"
failure_category: "rspec_test_already_failed_on_default_branch"
# Assets Compilation
- pattern: "Error: Unable to compile webpack production bundle"
failure_category: "assets_compilation"
- pattern: "webpack-cli.+Error: EEXIST: file already exists"
failure_category: "webpack_cli"
# VueJS 3
- pattern: "Expected unset environment variable"
failure_category: "vuejs3"
- pattern: "either now pass under Vue 3, or no longer exist"
failure_category: "vuejs3"
# RSpec usage
- pattern: "The use of doubles or partial doubles from rspec-mocks outside of the per-test lifecycle is not supported."
failure_category: "rspec_usage"
- pattern: "Could not find shared context"
failure_category: "rspec_usage"
- pattern: "Could not find shared examples"
failure_category: "rspec_usage"
- pattern: "is not available on an example group"
failure_category: "rspec_usage"
- pattern: "WebMock::NetConnectNotAllowedError"
failure_category: "rspec_usage"
# Infrastructure issues
- pattern: "GitLab is currently unable to handle this request due to load."
failure_category: "gitlab_too_much_load"
- pattern: "ERROR: Job failed: failed to pull image"
failure_category: "failed_to_pull_image"
- pattern: "Is the docker daemon running"
failure_category: "docker_not_running"
- pattern: "no space left on device"
failure_category: "no_space_left"
- pattern: "There was insufficient space remaining on the device"
failure_category: "no_space_left"
- pattern: "Uploading artifacts .+ 502 Bad Gateway"
failure_category: "artifacts_upload_502"
- pattern: "500 Internal Server Error"
failure_category: "http_500"
- pattern: "Error: Kubernetes cluster unreachable"
failure_category: "kubernetes"
- pattern: "502 Server Error"
failure_category: "http_502"
- pattern: "502 \"Bad Gateway\""
failure_category: "http_502"
- pattern: "status code: 502"
failure_category: "http_502"
- pattern: "The requested URL returned error: 500"
failure_category: "gitlab_unavailable"
- pattern: "GitLab is not responding"
failure_category: "gitlab_unavailable"
- pattern: "fatal: unable to access 'https://gitlab.com"
failure_category: "gitlab_unavailable"
- pattern: "PG::ConnectionBad"
failure_category: "postgresql_unavailable"
- pattern: "Downloading artifacts from coordinator... not found"
failure_category: "artifacts_not_found_404"
- pattern: "curl: \\(7\\) Failed to connect to 127.0.0.1 port 3000 after"
failure_category: "rails-production-server-boot"
- pattern: "curl: \\(7\\) Failed to connect to 127.0.0.1 port 8080 after"
failure_category: "rails-production-server-boot"
- pattern: "Redis client could not fetch cluster information"
failure_category: "redis"
# CNG
- pattern: "=== block '.+' error ==="
failure_category: "cng"
- pattern: "failed to load command: orchestrator"
failure_category: "cng"
# E2E container images build
- pattern: "Building GDK image"
failure_category: "build_gdk_image"
- pattern: "Building QA image for"
failure_category: "build_qa_image"
# RSpec (loosely related)
- pattern: "We have detected a PG::QueryCanceled error in the specs, so we're failing early."
failure_category: "pg_query_canceled"
# Rake tasks
- pattern: "Feature::InvalidFeatureFlagError: "
failure_category: "rake_invalid_feature_flag"
- pattern: "New version of Sprockets detected. This patch can likely be removed."
failure_category: "rake_new_version_of_sprockets"
- pattern: "Don't know how to build task.+See the list of available tasks with"
failure_category: "rake_task_not_found"
- pattern: "Changes in worker queues found, please update the metadata by running"
failure_category: "rake_change_in_worker_queues"
# Other
- pattern: "Could not find downstream pipeline triggered via"
failure_category: "e2e:code-suggestions-eval"
- pattern: "Feature flag usage check failed"
failure_category: "feature_flag_usage_check_failure"
- pattern: "Job execution will continue but no more output will be collected"
failure_category: "logs_too_big_to_analyze"
# Important: The order of failure categories matters here. It's going to match from top to bottom.
failure_categories:
danger:
description: "Issues with the Danger code review bot that checks MRs for common problems. Often seen when the 'danger-review' job needs to be retried."
patterns:
- "If needed, you can retry the.+`danger-review` job"
db_migrations:
description: "Database migration failures, including schema inconsistencies, rollback issues, pending migrations, and column operation errors. These issues often occur when database changes aren't properly synchronized."
patterns:
- "Error: rollback of added migrations does not revert db/structure.sql to previous state, please investigate"
- "the committed db/structure.sql does not match the one generated by running added migrations"
- "the committed files in db/schema_migrations do not match those expected by the added migrations"
- "You have.+pending migrations"
- "Column operations, like dropping, renaming or primary key conversion"
- "createdb: error:"
- "Batched migration should be finalized only after at-least one required stop from queuing it"
db_table_write_protected:
description: "Attempts to write to database tables that are protected within the GitLab database schema, or unsupported cross-joins between database tables. This usually happens when code tries to modify tables it shouldn't have access to."
patterns:
- "Table.+is write protected within this Gitlab database"
- "Unsupported cross-join across"
db_cross_schema_access:
description: "Unauthorized cross-schema database access attempts, which occur when code tries to access tables outside the allowed schemas for the current database connection."
patterns:
- "Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection::CrossSchemaAccessError"
db_connection_in_rails_initializer:
description: "Database connections being made during Rails initializers, which is discouraged as it can cause race conditions and other issues during application startup."
patterns:
- "raise_database_connection_made_error"
rails_pg_not_in_database_dictionary:
description: "Tables missing from the database dictionary, which maintains metadata about database tables. New or deleted tables must be properly registered in the dictionary."
patterns:
- "Any new or deleted tables must be added to the database dictionary"
rails_pg_no_foreign_key:
description: "Missing foreign key constraints in the database schema, which can lead to data integrity issues. This typically happens during migration rollbacks or schema changes."
patterns:
- "has no foreign key for"
rails_pg_active_sql_transaction:
description: "Postgres errors related to operations that cannot be performed inside a transaction block, such as creating indexes concurrently."
patterns:
- "PG::ActiveSqlTransaction"
rails_pg_check_violation:
description: "Violations of PostgreSQL check constraints on database tables, which prevent invalid data from being inserted."
patterns:
- "PG::CheckViolation"
rails_pg_dependent_objects_still_exist:
description: "Attempts to drop database objects that have dependencies, such as constraints or references from other tables."
patterns:
- "PG::DependentObjectsStillExist"
rails_pg_duplicate_alias:
description: "SQL query errors due to duplicate table aliases, typically occurring in complex queries with the same table name specified multiple times."
patterns:
- "PG::DuplicateAlias"
rails_pg_duplicate_table:
description: "Attempts to create tables that already exist in the database, usually during migration rollbacks that aren't properly checking for existing tables."
patterns:
- "PG::DuplicateTable"
rails_pg_invalid_column_reference:
description: "SQL syntax errors related to invalid column references, such as ordering by columns not in the select list in a DISTINCT query."
patterns:
- "PG::InvalidColumnReference"
rails_pg_undefined_column:
description: "References to columns that don't exist in database tables, typically occurring during schema changes or mismatched migrations."
patterns:
- "PG::UndefinedColumn"
rails_pg_undefined_table:
description: "References to tables that don't exist in the database, often seen during migration rollbacks or when tables are renamed/dropped."
patterns:
- "PG::UndefinedTable"
rails_pg_sidekiq:
description: "Sidekiq API routing errors in the database context, particularly related to unrouted Sidekiq Redis calls that should be inside a .via block."
patterns:
- "Gitlab::SidekiqSharding::Validator::UnroutedSidekiqApiError"
psql_failed_command:
description: "Failures when executing PostgreSQL commands directly through psql, often seen during schema loading or database initialization."
patterns:
- "psql:.+ERROR:"
rake_unallowed_schemas_accessed:
description: "Unauthorized access attempts to restricted database schemas during rake tasks, which can happen when code tries to access tables outside its allowed scope."
patterns:
- "Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLAccessDeniedError:"
rake_enqueue_from_transaction:
description: "Attempts to enqueue Sidekiq jobs from within database transactions, which can lead to race conditions if the job runs before the transaction is committed."
patterns:
- "Sidekiq::Job::EnqueueFromTransactionError"
rake_db_unknown_schema:
description: "References to undefined database schemas in rake tasks, usually when configuration files are missing schema definitions."
patterns:
- "Gitlab::Database::GitlabSchema::UnknownSchemaError"
rake_rails_unknown_primary_key:
description: "Missing primary key definitions in ActiveRecord models, which can cause issues with record identification and association management."
patterns:
- "ActiveRecord::UnknownPrimaryKey"
error_in_db_migration:
description: "General errors occurring during database migrations that cause the migration and all subsequent migrations to be canceled."
patterns:
- "An error has occurred, this and all later migrations canceled"
- "An error has occurred, all later migrations canceled"
rails_invalid_sql_statement:
description: "Invalid SQL statements in Rails ActiveRecord operations that cannot be executed by PostgreSQL due to syntax or semantic errors."
patterns:
- "ActiveRecord::StatementInvalid"
graphql_lint:
description: "GraphQL schema validation and linting errors, including outdated schema files that need to be regenerated or queries that fail validation."
patterns:
- "needs to be regenerated, please run:"
- "GraphQL quer.+out of.+failed validation:"
eslint:
description: "JavaScript code style and quality issues detected by ESLint, the JavaScript linter used in GitLab's frontend development."
patterns:
- "node scripts/frontend/eslint.js . --format gitlab"
- "Running ESLint with the following rules enabled"
docs_lint_failed:
  description: "Documentation linting failures, including formatting issues, broken links, and other quality-check failures in GitLab's documentation."
patterns:
- "ERROR: lint test\\(s\\) failed.+Review the log carefully to see full listing"
- "files inspected,.+lints? detected"
- "Issues found in .+input.+Find details below."
- "scripts/lint-docs-redirects.rb"
- "git diff --exit-code db/docs"
docs_outdated:
description: "Outdated documentation that needs to be regenerated, typically seen when code changes affect documented features or APIs."
patterns:
- "documentation is outdated.+Please update it by running"
cells_lint:
description: "Linting failures in Cells-related code and configuration, particularly around application settings definition files."
patterns:
- "scripts/cells/ci-ensure-application-settings-have-definition-file.rb"
pajamas_violations:
description: "Violations of Pajamas design system requirements, GitLab's design system that ensures consistent UI components and experiences."
patterns:
- "blocking Pajamas violation\\(s\\) found."
- "Merge request scan exit status: 2"
yaml_lint_failed:
description: "YAML syntax and formatting issues detected by yamllint, which checks for problems in YAML configuration files."
patterns:
- "yamllint "
rake_some_po_files_invalid:
description: "Invalid translation files (PO files) detected during rake tasks, usually containing syntax errors or formatting issues."
patterns:
- "Not all PO-files are valid"
rake_outdated_translated_strings:
description: "Outdated translation strings that need to be updated to match changes in the source language strings."
patterns:
- "Changes in translated strings found, please update file"
docs_deprecations_outdated:
description: "Outdated documentation about deprecated features that needs to be updated to reflect current deprecation status."
patterns:
- "ERROR: Deprecations documentation is outdated"
frontend_lockfile:
description: "Issues with frontend dependency lockfiles, including Yarn lockfile inconsistencies that need to be resolved."
patterns:
- "Found problems with the lockfile"
- "Your lockfile needs to be updated, but yarn was run with"
yarn_dependency_violation:
description: "Peer dependency violations in Yarn packages, where installed packages don't meet the version requirements of their dependents."
patterns:
- "Peer dependency violation"
yarn_run:
description: "Failures in Yarn script execution, typically in frontend build, test, or lint commands."
patterns:
- "yarn run.+failed with the following error"
gemfile_issues:
description: "Issues with Ruby gem dependencies and Gemfile lockfiles, including outdated dependencies, checksum mismatches, and conflicting gems."
patterns:
- "changed, but the lockfile can't be updated"
- "Your lockfile does not satisfy dependencies of"
- "contains outdated dependencies"
- "You have already activated"
- "but your Gemfile requires"
    - "\\(re-\\)generate Gemfile.checksum with"
- "Bundler cannot continue installing"
- "Cached checksum for .+ not found"
gems_not_found:
description: "Missing Ruby gems required by the application, which can happen when dependencies aren't properly installed or configured."
patterns:
- "Bundler::GemNotFound"
gems_build:
description: "Failures during gem native extension building, which often occur with gems that have C extensions that fail to compile."
patterns:
- "Gem::Ext::BuildError: ERROR: Failed to build gem native extension."
bao_linux_checksum_mismatch:
description: "Checksum verification failures for the bao-linux-amd64 binary, used for OpenBao secrets management in GitLab."
patterns:
- "ERROR: Checksum mismatch for `bao-linux-amd64`"
gemnasium-python-dependency_scanning:
description: "Failures in Python dependency scanning with Gemnasium, typically related to pipenv sync issues in dependency scanning jobs."
patterns:
- "running /usr/local/bin/pipenv sync .+: exit status 1"
- "\\[gemnasium-python\\] .+ pipenv sync failed"
dependency-scanning_permission_denied:
description: "Permission issues during dependency scanning, where the scanner cannot access files due to insufficient permissions."
patterns:
- "\\[FATA\\] \\[dependency-scanning\\].+ permission denied"
package_hunter:
description: "Errors in package monitoring and tracking services, used to scan for vulnerabilities in project dependencies."
patterns:
- "Error calling /monitor/project/"
git_issues:
description: "Git repository and version control related failures, including cloning issues, reference problems, and connectivity errors."
patterns:
- "cloning repository: exit status 128"
- "did not match any file\\(s\\) known to git"
- "fatal: couldn't find remote ref"
- "fatal: expected flush after ref listing"
- "fatal: fetch-pack: invalid index-pack output"
- "fatal: Not a valid object name"
- "fatal: protocol error: bad pack header"
- "fatal: the remote end hung up unexpectedly"
- "TimeoutExpired: Command '\\['git', 'fetch'"
as_if_foss_git_push_issues:
description: "Git push failures in the as-if-FOSS pipeline, which creates a mirror of the GitLab codebase without EE-specific code."
patterns:
- "failed to push some refs to 'https://gitlab.com/gitlab-org/gitlab-foss.git'"
rubocop:
description: "Ruby code style and quality issues detected by RuboCop, the Ruby linter and static code analyzer used in GitLab's backend development."
patterns:
- "offenses? detected"
- "=== Filtered warnings ==="
jest:
description: "JavaScript test failures in Jest test suites, used for testing GitLab's frontend code."
patterns:
- "Command .+ node_modules/.bin/jest.+ exited with status 1"
rspec_undercoverage:
description: "Insufficient test coverage detected in the codebase, where methods or classes lack adequate test coverage."
patterns:
- "some methods have no test coverage!"
gitaly_spawn_failed:
description: "Failures in spawning Gitaly service processes, which handle Git operations in GitLab."
patterns:
- "gitaly spawn failed"
apollo:
description: "Issues with Apollo GraphQL client configuration or operation, used for frontend GraphQL interactions."
patterns:
- "Loading Apollo Project"
rspec_test_already_failed_on_default_branch:
description: "Tests that are already failing on the default branch, indicated by exit code 112. These failures are not introduced by the current changes."
patterns:
- "ERROR: Job failed: exit code 112"
assets_compilation:
description: "Failures during frontend asset compilation with webpack, used to bundle JavaScript, CSS, and other assets."
patterns:
- "Error: Unable to compile webpack production bundle"
webpack_cli:
description: "Webpack CLI execution errors, typically related to file system operations during the build process."
patterns:
- "webpack-cli.+Error: EEXIST: file already exists"
vuejs3:
description: "Compatibility issues with Vue.js 3 migrations, as GitLab transitions from Vue 2 to Vue 3 in its frontend code."
patterns:
- "Expected unset environment variable"
- "either now pass under Vue 3, or no longer exist"
rspec_usage:
description: "Improper usage of RSpec testing framework features, including issues with doubles, shared contexts, and other testing patterns."
patterns:
- "The use of doubles or partial doubles from rspec-mocks outside of the per-test lifecycle is not supported."
- "Could not find shared context"
- "Could not find shared examples"
- "is not available on an example group"
- "WebMock::NetConnectNotAllowedError"
gitlab_too_much_load:
  description: "Situations where the GitLab instance is under excessive load and unable to handle requests, typically seen in pipeline or API interactions."
patterns:
- "GitLab is currently unable to handle this request due to load."
failed_to_pull_image:
description: "Docker image pull failures in CI/CD, where container images cannot be downloaded from the registry."
patterns:
- "ERROR: Job failed: failed to pull image"
docker_not_running:
description: "Issues where the Docker daemon is not running or is unavailable, preventing container operations."
patterns:
- "Is the docker daemon running"
no_space_left:
description: "Insufficient disk space on the CI/CD runner, causing file operations to fail due to lack of storage."
patterns:
- "no space left on device"
- "There was insufficient space remaining on the device"
artifacts_upload_502:
description: "Bad Gateway errors (HTTP 502) during CI/CD artifact uploads, typically due to network or server issues."
patterns:
- "Uploading artifacts .+ 502 Bad Gateway"
http_500:
description: "HTTP 500 Internal Server errors when interacting with web services, indicating server-side problems."
patterns:
- "500 Internal Server Error"
kubernetes:
description: "Kubernetes cluster connectivity or operation issues, affecting containerized deployments and tests."
patterns:
- "Error: Kubernetes cluster unreachable"
http_502:
description: "HTTP 502 Bad Gateway errors when interacting with web services, indicating proxy or intermediate server issues."
patterns:
- "502 Server Error"
- "502 \"Bad Gateway\""
- "status code: 502"
gitlab_unavailable:
  description: "Situations where the GitLab instance is unavailable or unresponsive, preventing API requests or Git operations."
patterns:
- "The requested URL returned error: 500"
- "GitLab is not responding"
- "fatal: unable to access 'https://gitlab.com"
postgresql_unavailable:
description: "PostgreSQL database connection failures, where the database is unreachable or returns connection errors."
patterns:
- "PG::ConnectionBad"
artifacts_not_found_404:
description: "CI/CD artifacts not found (HTTP 404 errors), typically when trying to download artifacts from previous jobs that don't exist."
patterns:
- "Downloading artifacts from coordinator... not found"
rails-production-server-boot:
description: "Rails production server boot failures, where the application server fails to start or respond to requests on expected ports."
patterns:
- "curl: \\(7\\) Failed to connect to 127.0.0.1 port 3000 after"
- "curl: \\(7\\) Failed to connect to 127.0.0.1 port 8080 after"
redis:
description: "Redis connection or operation issues, affecting caching, queuing, and other Redis-dependent services."
patterns:
- "Redis client could not fetch cluster information"
cng:
description: "Cloud Native GitLab container image issues, affecting containerized GitLab deployments and related tools."
patterns:
- "=== block '.+' error ==="
- "failed to load command: orchestrator"
build_gdk_image:
description: "Failures during GitLab Development Kit image building, used for local development environments."
patterns:
- "Building GDK image"
build_qa_image:
description: "Failures during QA image building, used for end-to-end testing of GitLab."
patterns:
- "Building QA image for"
pg_query_canceled:
description: "PostgreSQL query cancellation errors in tests, typically due to long-running queries or timeout configurations."
patterns:
- "We have detected a PG::QueryCanceled error in the specs, so we're failing early."
rake_invalid_feature_flag:
description: "Invalid feature flag configurations detected, such as improper default settings or missing definition files."
patterns:
- "Feature::InvalidFeatureFlagError: "
rake_new_version_of_sprockets:
description: "Outdated Sprockets asset pipeline patching that is no longer needed with newer versions of Sprockets."
patterns:
- "New version of Sprockets detected. This patch can likely be removed."
rake_task_not_found:
description: "Referenced rake tasks that don't exist, typically due to typos or removed tasks."
patterns:
- "Don't know how to build task.+See the list of available tasks with"
rake_change_in_worker_queues:
description: "Changes detected in Sidekiq worker queue configurations that require metadata updates."
patterns:
- "Changes in worker queues found, please update the metadata by running"
e2e:code-suggestions-eval:
description: "End-to-end test failures in code suggestions evaluation, particularly when downstream pipelines can't be found."
patterns:
- "Could not find downstream pipeline triggered via"
feature_flag_usage_check_failure:
description: "Feature flag usage check failures, where feature flags are not properly defined or used."
patterns:
- "Feature flag usage check failed"
logs_too_big_to_analyze:
description: "Log output exceeded size limits for complete analysis, truncating job logs and potentially hiding important information."
patterns:
- "Job execution will continue but no more output will be collected"