Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-03-04 12:12:09 +00:00
parent e5a0db2327
commit 6aca5a3bed
81 changed files with 2024 additions and 349 deletions

View File

@ -3430,7 +3430,6 @@ Gitlab/BoundedContexts:
- 'ee/app/workers/concerns/elastic/migration_helper.rb'
- 'ee/app/workers/concerns/elastic/migration_obsolete.rb'
- 'ee/app/workers/concerns/elastic/migration_options.rb'
- 'ee/app/workers/concerns/elastic/migration_remove_fields_helper.rb'
- 'ee/app/workers/concerns/elastic/migration_state.rb'
- 'ee/app/workers/concerns/elastic/migration_update_mappings_helper.rb'
- 'ee/app/workers/concerns/geo_backoff_delay.rb'

View File

@ -1038,7 +1038,6 @@ RSpec/NamedSubject:
- 'ee/spec/workers/concerns/elastic/migration_helper_spec.rb'
- 'ee/spec/workers/concerns/elastic/migration_obsolete_spec.rb'
- 'ee/spec/workers/concerns/elastic/migration_options_spec.rb'
- 'ee/spec/workers/concerns/elastic/migration_remove_fields_helper_spec.rb'
- 'ee/spec/workers/create_github_webhook_worker_spec.rb'
- 'ee/spec/workers/deployments/auto_rollback_worker_spec.rb'
- 'ee/spec/workers/dora/daily_metrics/refresh_worker_spec.rb'

View File

@ -46,7 +46,6 @@ Search/NamespacedClass:
- 'ee/app/workers/concerns/elastic/migration_helper.rb'
- 'ee/app/workers/concerns/elastic/migration_obsolete.rb'
- 'ee/app/workers/concerns/elastic/migration_options.rb'
- 'ee/app/workers/concerns/elastic/migration_remove_fields_helper.rb'
- 'ee/app/workers/concerns/elastic/migration_state.rb'
- 'ee/app/workers/concerns/elastic/migration_update_mappings_helper.rb'
- 'ee/app/workers/elastic/migration_worker.rb'

View File

@ -1 +1 @@
6a9a82f3a1d6f8a7adf7ebd7e00c56fc81221ab0
bf993d121acd0533791e294a678a06180933ea89

View File

@ -56,6 +56,15 @@ export default {
workItemIconName() {
return this.workItem?.workItemType?.iconName;
},
workItemMovedToWorkItemUrl() {
return this.workItem?.movedToWorkItemUrl;
},
workItemDuplicatedToWorkItemUrl() {
return this.workItem?.duplicatedToWorkItemUrl;
},
workItemPromotedToEpicUrl() {
return this.workItem?.promotedToEpicUrl;
},
isDiscussionLocked() {
return findNotesWidget(this.workItem)?.discussionLocked;
},
@ -93,7 +102,13 @@ export default {
<gl-loading-icon inline />
</div>
<div v-else class="gl-mb-3 gl-mt-3 gl-text-subtle">
<work-item-state-badge v-if="workItemState" :work-item-state="workItemState" />
<work-item-state-badge
v-if="workItemState"
:work-item-state="workItemState"
:duplicated-to-work-item-url="workItemDuplicatedToWorkItemUrl"
:moved-to-work-item-url="workItemMovedToWorkItemUrl"
:promoted-to-epic-url="workItemPromotedToEpicUrl"
/>
<gl-loading-icon v-if="updateInProgress" inline />
<confidentiality-badge
v-if="isWorkItemConfidential"

View File

@ -36,6 +36,7 @@ import {
WIDGET_TYPE_WEIGHT,
WIDGET_TYPE_DEVELOPMENT,
STATE_OPEN,
WIDGET_TYPE_ERROR_TRACKING,
WIDGET_TYPE_ITERATION,
WIDGET_TYPE_MILESTONE,
WORK_ITEM_TYPE_VALUE_INCIDENT,
@ -73,6 +74,7 @@ import WorkItemDescription from './work_item_description.vue';
import WorkItemNotes from './work_item_notes.vue';
import WorkItemAwardEmoji from './work_item_award_emoji.vue';
import WorkItemRelationships from './work_item_relationships/work_item_relationships.vue';
import WorkItemErrorTracking from './work_item_error_tracking.vue';
import WorkItemStickyHeader from './work_item_sticky_header.vue';
import WorkItemAncestors from './work_item_ancestors/work_item_ancestors.vue';
import WorkItemTitle from './work_item_title.vue';
@ -115,6 +117,7 @@ export default {
WorkItemTree,
WorkItemNotes,
WorkItemRelationships,
WorkItemErrorTracking,
WorkItemStickyHeader,
WorkItemAncestors,
WorkItemTitle,
@ -398,6 +401,9 @@ export default {
workItemAwardEmoji() {
return this.findWidget(WIDGET_TYPE_AWARD_EMOJI);
},
workItemErrorTrackingIdentifier() {
return this.findWidget(WIDGET_TYPE_ERROR_TRACKING)?.identifier;
},
workItemHierarchy() {
return this.findWidget(WIDGET_TYPE_HIERARCHY);
},
@ -1036,6 +1042,12 @@ export default {
/>
</aside>
<work-item-error-tracking
v-if="workItemErrorTrackingIdentifier"
:full-path="workItemFullPath"
:identifier="workItemErrorTrackingIdentifier"
/>
<design-widget
v-if="hasDesignWidget"
:class="{ 'gl-mt-0': isDrawer }"

View File

@ -221,7 +221,7 @@ export default {
? confidentialMergeRequestState.selectedProject.pathWithNamespace
: this.workItemFullPath,
workItemIid: this.workItemIid,
sourceBranch: this.defaultBranch,
sourceBranch: this.sourceName,
targetBranch: this.branchName,
});
@ -260,7 +260,7 @@ export default {
: this.workItemFullPath,
workItemIid: this.workItemIid,
sourceBranch: this.branchName,
targetBranch: this.defaultBranch,
targetBranch: this.sourceName,
});
/** open the merge request once we have it created */
@ -383,6 +383,7 @@ export default {
<gl-form-input
id="source-name-id"
v-model.trim="sourceName"
data-testid="source-name"
:state="!invalidSource"
required
name="source-name"
@ -405,6 +406,7 @@ export default {
<gl-form-input
id="branch-name-id"
v-model.trim="branchName"
data-testid="target-name"
:state="!invalidBranch"
:disabled="isLoading || creatingBranch"
required

View File

@ -0,0 +1,85 @@
<script>
import { GlLoadingIcon } from '@gitlab/ui';
import { createAlert } from '~/alert';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import service from '~/error_tracking/services';
import Poll from '~/lib/utils/poll';
import { __ } from '~/locale';
export default {
components: {
GlLoadingIcon,
Stacktrace,
},
props: {
fullPath: {
type: String,
required: true,
},
identifier: {
type: String,
required: true,
},
},
data() {
return {
loading: false,
stackTraceData: {},
};
},
computed: {
stackTraceEntries() {
return this.stackTraceData.stack_trace_entries?.toReversed() ?? [];
},
stackTracePath() {
return `/${this.fullPath}/-/error_tracking/${this.identifier}/stack_trace.json`;
},
},
mounted() {
this.startPolling(this.stackTracePath);
},
beforeDestroy() {
this.stackTracePoll?.stop();
},
methods: {
startPolling(endpoint) {
this.loading = true;
this.stackTracePoll = new Poll({
resource: service,
method: 'getSentryData',
data: { endpoint },
successCallback: ({ data }) => {
if (!data) {
return;
}
this.stackTraceData = data.error;
this.stackTracePoll.stop();
this.loading = false;
},
errorCallback: () => {
createAlert({ message: __('Failed to load stacktrace.') });
this.loading = false;
},
});
this.stackTracePoll.makeRequest();
},
},
};
</script>
<template>
<div>
<div :class="{ 'gl-border-b-0': loading }" class="card card-slim gl-mb-0 gl-mt-5">
<div class="card-header gl-border-b-0">
<h2 class="card-title gl-my-2 gl-text-base">{{ __('Stack trace') }}</h2>
</div>
</div>
<div v-if="loading" class="card gl-mb-0">
<gl-loading-icon class="gl-my-3" />
</div>
<stacktrace v-else :entries="stackTraceEntries" />
</div>
</template>

View File

@ -1,11 +1,13 @@
<script>
import { GlBadge } from '@gitlab/ui';
import { __ } from '~/locale';
import { GlBadge, GlLink, GlSprintf } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { STATE_OPEN } from '../constants';
export default {
components: {
GlBadge,
GlLink,
GlSprintf,
},
props: {
workItemState: {
@ -17,14 +19,26 @@ export default {
required: false,
default: true,
},
movedToWorkItemUrl: {
type: String,
required: false,
default: '',
},
duplicatedToWorkItemUrl: {
type: String,
required: false,
default: '',
},
promotedToEpicUrl: {
type: String,
required: false,
default: '',
},
},
computed: {
isWorkItemOpen() {
return this.workItemState === STATE_OPEN;
},
stateText() {
return this.isWorkItemOpen ? __('Open') : __('Closed');
},
workItemStateIcon() {
if (!this.showIcon) {
return null;
@ -35,12 +49,43 @@ export default {
workItemStateVariant() {
return this.isWorkItemOpen ? 'success' : 'info';
},
statusText() {
if (this.isWorkItemOpen) {
return __('Open');
}
if (this.closedStatusLink) {
return s__('IssuableStatus|Closed (%{link})');
}
return __('Closed');
},
closedStatusLink() {
return this.duplicatedToWorkItemUrl || this.movedToWorkItemUrl || this.promotedToEpicUrl;
},
closedStatusText() {
if (this.duplicatedToWorkItemUrl) {
return s__('IssuableStatus|duplicated');
}
if (this.movedToWorkItemUrl) {
return s__('IssuableStatus|moved');
}
if (this.promotedToEpicUrl) {
return s__('IssuableStatus|promoted');
}
return '';
},
},
};
</script>
<template>
<gl-badge :variant="workItemStateVariant" :icon="workItemStateIcon" class="gl-align-middle">
{{ stateText }}
<gl-sprintf v-if="closedStatusLink" :message="statusText">
<template #link>
<gl-link class="!gl-text-inherit gl-underline" :href="closedStatusLink">{{
closedStatusText
}}</gl-link>
</template>
</gl-sprintf>
<template v-else>{{ statusText }}</template>
</gl-badge>
</template>

View File

@ -146,7 +146,13 @@ export default {
<div
class="work-item-sticky-header-text gl-mx-auto gl-flex gl-items-center gl-gap-3 gl-px-5 xl:gl-px-6"
>
<work-item-state-badge v-if="workItemState" :work-item-state="workItemState" />
<work-item-state-badge
v-if="workItemState"
:work-item-state="workItemState"
:promoted-to-epic-url="workItem.promotedToEpicUrl"
:duplicated-to-work-item-url="workItem.duplicatedToWorkItemUrl"
:moved-to-work-item-url="workItem.movedToWorkItemUrl"
/>
<gl-loading-icon v-if="updateInProgress" />
<confidentiality-badge
v-if="workItem.confidential"

View File

@ -11,6 +11,7 @@ export const TRACKING_CATEGORY_SHOW = 'workItems:show';
export const WIDGET_TYPE_ASSIGNEES = 'ASSIGNEES';
export const WIDGET_TYPE_DESCRIPTION = 'DESCRIPTION';
export const WIDGET_TYPE_ERROR_TRACKING = 'ERROR_TRACKING';
export const WIDGET_TYPE_AWARD_EMOJI = 'AWARD_EMOJI';
export const WIDGET_TYPE_NOTIFICATIONS = 'NOTIFICATIONS';
export const WIDGET_TYPE_CURRENT_USER_TODOS = 'CURRENT_USER_TODOS';

View File

@ -1,4 +1,4 @@
#import "./work_item.fragment.graphql"
#import "ee_else_ce/work_items/graphql/work_item.fragment.graphql"
#import "./work_item_linked_items.fragment.graphql"
mutation addLinkedItems($input: WorkItemAddLinkedItemsInput!) {

View File

@ -569,6 +569,9 @@ export const setNewWorkItemCache = async (
webUrl: `${baseURL}/groups/gitlab-org/-/work_items/new`,
reference: '',
createNoteEmail: null,
movedToWorkItemUrl: null,
duplicatedToWorkItemUrl: null,
promotedToEpicUrl: null,
project: null,
namespace: {
id: newWorkItemPath,

View File

@ -1,4 +1,4 @@
#import "./work_item.fragment.graphql"
#import "ee_else_ce/work_items/graphql/work_item.fragment.graphql"
mutation createWorkItem($input: WorkItemCreateInput!) {
workItemCreate(input: $input) {

View File

@ -1,5 +1,7 @@
query getWorkItemStateCounts(
$excludeProjects: Boolean = false
$includeDescendants: Boolean = true
$isGroup: Boolean = true
$fullPath: ID!
$search: String
$sort: WorkItemSort
@ -16,11 +18,11 @@ query getWorkItemStateCounts(
$in: [IssuableSearchableField!]
$not: NegatedWorkItemFilterInput
$or: UnionedWorkItemFilterInput
$isGroup: Boolean = true
) {
group(fullPath: $fullPath) @include(if: $isGroup) {
id
workItemStateCounts(
excludeProjects: $excludeProjects
includeDescendants: $includeDescendants
search: $search
sort: $sort

View File

@ -2,6 +2,9 @@
#import "ee_else_ce/work_items/graphql/list/work_item_widgets.fragment.graphql"
query getWorkItems(
$excludeProjects: Boolean = false
$includeDescendants: Boolean = true
$isGroup: Boolean = true
$fullPath: ID!
$search: String
$sort: WorkItemSort
@ -22,15 +25,13 @@ query getWorkItems(
$beforeCursor: String
$firstPageSize: Int
$lastPageSize: Int
$isGroup: Boolean = true
$excludeProjects: Boolean
) {
group(fullPath: $fullPath) @include(if: $isGroup) {
id
name
workItems(
excludeProjects: $excludeProjects
includeDescendants: true
includeDescendants: $includeDescendants
search: $search
sort: $sort
state: $state

View File

@ -1,4 +1,4 @@
#import "./work_item.fragment.graphql"
#import "ee_else_ce/work_items/graphql/work_item.fragment.graphql"
mutation localUpdateWorkItem($input: LocalUpdateWorkItemInput) {
localUpdateWorkItem(input: $input) @client {

View File

@ -14,6 +14,8 @@ fragment WorkItem on WorkItem {
webUrl
reference(full: true)
createNoteEmail
movedToWorkItemUrl
duplicatedToWorkItemUrl
project {
id
}

View File

@ -1,4 +1,4 @@
#import "./work_item.fragment.graphql"
#import "ee_else_ce/work_items/graphql/work_item.fragment.graphql"
query workItemById($id: WorkItemID!) {
workItem(id: $id) {

View File

@ -1,4 +1,4 @@
#import "./work_item.fragment.graphql"
#import "ee_else_ce/work_items/graphql/work_item.fragment.graphql"
query namespaceWorkItem($fullPath: ID!, $iid: String!) {
workspace: namespace(fullPath: $fullPath) {

View File

@ -1,4 +1,4 @@
#import "./work_item.fragment.graphql"
#import "ee_else_ce/work_items/graphql/work_item.fragment.graphql"
mutation workItemConvert($input: WorkItemConvertInput!) {
workItemConvert(input: $input) {

View File

@ -1,4 +1,4 @@
#import "./work_item.fragment.graphql"
#import "ee_else_ce/work_items/graphql/work_item.fragment.graphql"
subscription workItemUpdated($id: WorkItemID!) {
workItemUpdated(workItemId: $id) {

View File

@ -111,4 +111,7 @@ fragment WorkItemWidgets on WorkItemWidget {
}
}
}
... on WorkItemWidgetErrorTracking {
identifier
}
}

View File

@ -255,6 +255,7 @@ export default {
return this.isGroup ? WORKSPACE_GROUP : WORKSPACE_PROJECT;
},
queryVariables() {
const hasGroupFilter = Boolean(this.urlFilterParams.group_path);
return {
fullPath: this.fullPath,
sort: this.sortKey,
@ -262,8 +263,8 @@ export default {
search: this.searchQuery,
...this.apiFilterParams,
...this.pageParams,
excludeProjects: this.isEpicsList,
includeDescendants: !this.apiFilterParams.fullPath,
excludeProjects: hasGroupFilter || this.isEpicsList,
includeDescendants: !hasGroupFilter,
types: this.apiFilterParams.types || this.workItemType || this.defaultWorkItemTypes,
isGroup: this.isGroup,
};

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true
module Types
module Ci
module Inputs
class InputSpecType < BaseObject # rubocop:disable Graphql/AuthorizeTypes -- Authorization checked upstream
graphql_name 'CiInputSpec'
description 'Input for pipeline creation'
field :name, GraphQL::Types::String,
null: false,
description: 'Name of the input.'
field :type, Types::Ci::Inputs::InputTypeEnum,
null: false,
description: 'Input data type.'
field :description, GraphQL::Types::String,
null: true,
description: 'Description of the input.'
field :required, GraphQL::Types::Boolean,
null: false,
description: 'Indicates whether the input is required.',
method: :required?
field :default, Types::Ci::Inputs::ValueInputType,
null: true,
description: 'Default value for the input, if provided.'
field :options, Types::Ci::Inputs::ValueInputType,
null: true,
description: 'Possible values that the input can take, if provided.'
field :regex, GraphQL::Types::String,
null: true,
description: 'Regular expression pattern that the input value must match if provided.'
end
end
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Types
module Ci
module Inputs
class InputTypeEnum < BaseEnum
graphql_name 'CiInputType'
description 'Available input types'
::Ci::PipelineCreation::Inputs::SpecInputs.input_types.each do |input_type|
value input_type.upcase, description: "#{input_type.capitalize} input", value: input_type
end
end
end
end
end

View File

@ -40,6 +40,17 @@ module Types
description: 'ID of the pipeline creation request.'
end
field :ci_pipeline_creation_inputs, [Types::Ci::Inputs::InputSpecType],
authorize: :create_pipeline,
null: true,
calls_gitaly: true,
experiment: { milestone: '17.10' },
description: 'Inputs to create a pipeline.' do
argument :ref, GraphQL::Types::String,
required: true,
description: 'Ref where to create the pipeline.'
end
field :full_path, GraphQL::Types::ID,
null: false,
description: 'Full path of the project.'
@ -914,6 +925,17 @@ module Types
project.container_repositories.size
end
def ci_pipeline_creation_inputs(ref:)
response = ::Ci::PipelineCreation::FindPipelineInputsService.new(
current_user: context[:current_user],
project: object,
ref: ref).execute
raise Gitlab::Graphql::Errors::ArgumentError, response.message if response.error?
response.payload[:inputs].all_inputs
end
def ci_config_variables(ref:)
result = ::Ci::ListConfigVariablesService.new(object, context[:current_user]).execute(ref)

View File

@ -2,7 +2,7 @@
module Ci
module PipelineCreation
class FindCiConfigSpecService
class FindPipelineInputsService
include Gitlab::Utils::StrongMemoize
# This service is used by the frontend to display inputs as an HTML form
@ -34,15 +34,14 @@ module Ci
# We need to read the uninterpolated YAML of the included file.
yaml_content = ::Gitlab::Ci::Config::Yaml.load!(project_config.content)
yaml_result = yaml_result_of_internal_include(yaml_content)
return error_response('invalid YAML config') unless yaml_result&.valid?
success_response(yaml_result.spec)
spec_inputs = Ci::PipelineCreation::Inputs::SpecInputs.new(yaml_result.spec[:inputs])
return error_response(spec_inputs.errors.join(', ')) if spec_inputs.errors.any?
success_response(spec_inputs)
else
# For now we do nothing. The unsupported case is `ProjectConfig::SecurityPolicyDefault`
# which is used when the project has no CI config explicitly defined but it's enforced
# by default using policies.
success_response({})
error_response('inputs not supported for this CI config source')
end
rescue ::Gitlab::Ci::Config::Yaml::LoadError => e
error_response("YAML load error: #{e.message}")
@ -52,8 +51,8 @@ module Ci
attr_reader :current_user, :project, :ref, :pipeline_source
def success_response(spec)
ServiceResponse.success(payload: { spec: spec })
def success_response(inputs)
ServiceResponse.success(payload: { inputs: inputs })
end
def error_response(message)

View File

@ -1,8 +1,9 @@
---
migration_job_name: BackfillProtectedEnvironmentDeployAccessLevelsProtectedEnvironmentProjectId
description: Backfills sharding key `protected_environment_deploy_access_levels.protected_environment_project_id` from `protected_environments`.
description: Backfills sharding key `protected_environment_deploy_access_levels.protected_environment_project_id`
from `protected_environments`.
feature_category: continuous_delivery
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162834
milestone: '17.4'
queued_migration_version: 20240815083838
finalized_by: # version of the migration that finalized this BBM
finalized_by: '20250303231605'

View File

@ -8,14 +8,6 @@ description: Stores the variables used in pipeline schedules
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/d278da48f837292491aaf81649afef1da3a1eb09
milestone: '9.4'
gitlab_schema: gitlab_ci
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: pipeline_schedule_id
table: ci_pipeline_schedules
sharding_key: project_id
belongs_to: pipeline_schedule
desired_sharding_key_migration_job_name: BackfillCiPipelineScheduleVariablesProjectId
table_size: small
sharding_key:
project_id: projects

View File

@ -0,0 +1,12 @@
---
table_name: snippet_repository_states
classes:
- Geo::SnippetRepositoryState
feature_categories:
- geo_replication
description: Separate table for snippet repositories containing Geo verification metadata.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/181255
milestone: '17.10'
gitlab_schema: gitlab_main_cell
exempt_from_sharding: true
table_size: small

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true
class CreateSnippetRepositoryStates < Gitlab::Database::Migration[2.2]
milestone '17.10'
def change
create_table :snippet_repository_states do |t|
t.datetime_with_timezone :verification_started_at
t.datetime_with_timezone :verification_retry_at
t.datetime_with_timezone :verified_at
t.bigint :snippet_repository_id, null: false
t.integer :verification_state, default: 0, limit: 2, null: false
t.integer :verification_retry_count, default: 0, limit: 2
t.binary :verification_checksum, using: 'verification_checksum::bytea'
t.text :verification_failure, limit: 255
t.index :snippet_repository_id, unique: true
t.index :verification_state, name: 'index_snippet_repository_states_on_verification_state'
t.index :verified_at,
where: "(verification_state = 0)",
order: { verified_at: 'ASC NULLS FIRST' },
name: 'index_snippet_repository_states_pending_verification'
t.index :verification_retry_at,
where: "(verification_state = 3)",
order: { verification_retry_at: 'ASC NULLS FIRST' },
name: 'index_snippet_repository_states_failed_verification'
t.index :verification_state,
where: "(verification_state = 0 OR verification_state = 3)",
name: 'index_snippet_repository_states_needs_verification'
end
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
class AddForeignKeyToSnippetRepositoryStatesSnippetId < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.10'
def up
add_concurrent_foreign_key :snippet_repository_states,
:snippet_repositories,
column: :snippet_repository_id,
target_column: :snippet_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :snippet_repository_states, column: :snippet_repository_id
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddExperimentsToSecurityOrchestrationPolicyConfigurations < Gitlab::Database::Migration[2.2]
milestone '17.10'
def change
add_column :security_orchestration_policy_configurations, :experiments, :jsonb, null: false, default: {}
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddApprovalsProjectIdIndex < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.10'
INDEX_NAME = 'index_approvals_on_project_id'
def up
add_concurrent_index :approvals, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :approvals, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddApprovalsProjectIdFk < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.10'
def up
add_concurrent_foreign_key :approvals, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :approvals, column: :project_id
end
end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
class AddCiPipelineScheduleVariablesProjectIdNotNull < Gitlab::Database::Migration[2.2]
milestone '17.10'
disable_ddl_transaction!
def up
add_not_null_constraint :ci_pipeline_schedule_variables, :project_id
end
def down
remove_not_null_constraint :ci_pipeline_schedule_variables, :project_id
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
class FinalizeHkBackfillProtectedEnvironmentDeployAccessLevelsProtected66570 < Gitlab::Database::Migration[2.2]
milestone '17.10'
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
def up
ensure_batched_background_migration_is_finished(
job_class_name: 'BackfillProtectedEnvironmentDeployAccessLevelsProtectedEnvironmentProjectId',
table_name: :protected_environment_deploy_access_levels,
column_name: :id,
job_arguments: [:protected_environment_project_id, :protected_environments, :project_id,
:protected_environment_id],
finalize: true
)
end
def down; end
end

View File

@ -0,0 +1 @@
5addc61acbabe1bcd3bb74ab87ea4291546a6e693352d630e77006dee873f5c6

View File

@ -0,0 +1 @@
270e4a1ec37e327924489391588b5447005787dbcf88a295fd200895b100a40a

View File

@ -0,0 +1 @@
2631c6b29c8bc28fab1ad8665c076b55938ae7f168bd820bfe8df0327e5f39e5

View File

@ -0,0 +1 @@
2e50706103444c53aa8d1ade1664ac267253b0edbc3669cb1ce110045d335250

View File

@ -0,0 +1 @@
291a040f4b2e8cde1314c450761bcebac4e46f0fbb02be069bca6f3361b498f0

View File

@ -0,0 +1 @@
3902ba32b25f5d581b45d5071f55bdb1c9cddcf73338fb5156393e86fac8a71a

View File

@ -0,0 +1 @@
9405413ee762e9b5ad018de4c92b5613f19d5e85f81b35a7e69c537d3e8f43ba

View File

@ -10810,7 +10810,8 @@ CREATE TABLE ci_pipeline_schedule_variables (
updated_at timestamp with time zone,
variable_type smallint DEFAULT 1 NOT NULL,
raw boolean DEFAULT false NOT NULL,
project_id bigint
project_id bigint,
CONSTRAINT check_17806054a8 CHECK ((project_id IS NOT NULL))
);
CREATE SEQUENCE ci_pipeline_schedule_variables_id_seq
@ -21326,6 +21327,7 @@ CREATE TABLE security_orchestration_policy_configurations (
updated_at timestamp with time zone NOT NULL,
configured_at timestamp with time zone,
namespace_id bigint,
experiments jsonb DEFAULT '{}'::jsonb NOT NULL,
CONSTRAINT cop_configs_project_or_namespace_existence CHECK (((project_id IS NULL) <> (namespace_id IS NULL)))
);
@ -21724,6 +21726,28 @@ CREATE TABLE snippet_repositories (
CONSTRAINT snippet_repositories_verification_failure_text_limit CHECK ((char_length(verification_failure) <= 255))
);
CREATE TABLE snippet_repository_states (
id bigint NOT NULL,
verification_started_at timestamp with time zone,
verification_retry_at timestamp with time zone,
verified_at timestamp with time zone,
snippet_repository_id bigint NOT NULL,
verification_state smallint DEFAULT 0 NOT NULL,
verification_retry_count smallint DEFAULT 0,
verification_checksum bytea,
verification_failure text,
CONSTRAINT check_0dabaefb7f CHECK ((char_length(verification_failure) <= 255))
);
CREATE SEQUENCE snippet_repository_states_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE snippet_repository_states_id_seq OWNED BY snippet_repository_states.id;
CREATE TABLE snippet_repository_storage_moves (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -26212,6 +26236,8 @@ ALTER TABLE ONLY slack_integrations_scopes ALTER COLUMN id SET DEFAULT nextval('
ALTER TABLE ONLY smartcard_identities ALTER COLUMN id SET DEFAULT nextval('smartcard_identities_id_seq'::regclass);
ALTER TABLE ONLY snippet_repository_states ALTER COLUMN id SET DEFAULT nextval('snippet_repository_states_id_seq'::regclass);
ALTER TABLE ONLY snippet_repository_storage_moves ALTER COLUMN id SET DEFAULT nextval('snippet_repository_storage_moves_id_seq'::regclass);
ALTER TABLE ONLY snippet_user_mentions ALTER COLUMN id SET DEFAULT nextval('snippet_user_mentions_id_seq'::regclass);
@ -29129,6 +29155,9 @@ ALTER TABLE ONLY smartcard_identities
ALTER TABLE ONLY snippet_repositories
ADD CONSTRAINT snippet_repositories_pkey PRIMARY KEY (snippet_id);
ALTER TABLE ONLY snippet_repository_states
ADD CONSTRAINT snippet_repository_states_pkey PRIMARY KEY (id);
ALTER TABLE ONLY snippet_repository_storage_moves
ADD CONSTRAINT snippet_repository_storage_moves_pkey PRIMARY KEY (id);
@ -31811,6 +31840,8 @@ CREATE INDEX index_approval_rules_code_owners_rule_type ON approval_merge_reques
CREATE INDEX index_approvals_on_merge_request_id_and_created_at ON approvals USING btree (merge_request_id, created_at);
CREATE INDEX index_approvals_on_project_id ON approvals USING btree (project_id);
CREATE UNIQUE INDEX index_approvals_on_user_id_and_merge_request_id ON approvals USING btree (user_id, merge_request_id);
CREATE INDEX index_approver_groups_on_group_id ON approver_groups USING btree (group_id);
@ -35209,6 +35240,16 @@ CREATE INDEX index_snippet_repositories_pending_verification ON snippet_reposito
CREATE INDEX index_snippet_repositories_verification_state ON snippet_repositories USING btree (verification_state);
CREATE INDEX index_snippet_repository_states_failed_verification ON snippet_repository_states USING btree (verification_retry_at NULLS FIRST) WHERE (verification_state = 3);
CREATE INDEX index_snippet_repository_states_needs_verification ON snippet_repository_states USING btree (verification_state) WHERE ((verification_state = 0) OR (verification_state = 3));
CREATE UNIQUE INDEX index_snippet_repository_states_on_snippet_repository_id ON snippet_repository_states USING btree (snippet_repository_id);
CREATE INDEX index_snippet_repository_states_on_verification_state ON snippet_repository_states USING btree (verification_state);
CREATE INDEX index_snippet_repository_states_pending_verification ON snippet_repository_states USING btree (verified_at NULLS FIRST) WHERE (verification_state = 0);
CREATE INDEX index_snippet_repository_storage_moves_on_snippet_id ON snippet_repository_storage_moves USING btree (snippet_id);
CREATE INDEX index_snippet_repository_storage_moves_on_snippet_organization_ ON snippet_repository_storage_moves USING btree (snippet_organization_id);
@ -39586,6 +39627,9 @@ ALTER TABLE ONLY csv_issue_imports
ALTER TABLE ONLY milestone_releases
ADD CONSTRAINT fk_5e73b8cad2 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY snippet_repository_states
ADD CONSTRAINT fk_5f750f3182 FOREIGN KEY (snippet_repository_id) REFERENCES snippet_repositories(snippet_id) ON DELETE CASCADE;
ALTER TABLE ONLY packages_conan_package_revisions
ADD CONSTRAINT fk_5f7c6a9244 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -40444,6 +40488,9 @@ ALTER TABLE ONLY user_member_roles
ALTER TABLE ONLY boards_epic_board_labels
ADD CONSTRAINT fk_cb8ded70e2 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY approvals
ADD CONSTRAINT fk_cbce403122 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY slack_integrations
ADD CONSTRAINT fk_cbe270434e FOREIGN KEY (integration_id) REFERENCES integrations(id) ON DELETE CASCADE;

View File

@ -44,9 +44,23 @@ This setup ensures enterprise-level privacy and flexibility, allowing seamless i
### Supported GitLab Duo features
#### Supported Code Suggestions features
The following table lists the GitLab Duo features, and whether they are available on GitLab Duo Self-Hosted or not.
For GitLab Duo Code Suggestions, you can use both [code generation and code completion](../../user/project/repository/code_suggestions/_index.md#code-completion-and-generation) with GitLab Duo Self-Hosted.
| Feature | Available on GitLab Duo Self-Hosted | GitLab version |
|----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------|----|
| [GitLab Duo Chat](../../user/gitlab_duo_chat/_index.md) | {{< icon name="check-circle-filled" >}} Yes | GitLab 17.9 and later |
| [Code Suggestions](../../user/project/repository/code_suggestions/_index.md) | {{< icon name="check-circle-filled" >}} Yes | GitLab 17.9 and later |
| [Code Explanation](../../user/project/repository/code_explain.md) | {{< icon name="check-circle-filled" >}} Yes | GitLab 17.9 and later |
| [Test Generation](../../user/gitlab_duo_chat/examples.md#write-tests-in-the-ide) | {{< icon name="check-circle-filled" >}} Yes | GitLab 17.9 and later |
| [Refactor Code](../../user/gitlab_duo_chat/examples.md#refactor-code-in-the-ide) | {{< icon name="check-circle-filled" >}} Yes | GitLab 17.9 and later |
| [Fix Code](../../user/gitlab_duo_chat/examples.md#fix-code-in-the-ide) | {{< icon name="check-circle-filled" >}} Yes | GitLab 17.9 and later |
| [AI Impact Dashboard](../../user/analytics/ai_impact_analytics.md) | {{< icon name="check-circle-dashed" >}} Beta | GitLab 17.9 and later |
| [Discussion Summary](../../user/discussions/_index.md#summarize-issue-discussions-with-duo-chat) | {{< icon name="dash-circle" >}} No | Not applicable |
| [GitLab Duo for the CLI](../../editor_extensions/gitlab_cli/_index.md#gitlab-duo-for-the-cli) | {{< icon name="dash-circle" >}} No | Not applicable |
| [Merge Commit Message Generation](../../user/project/merge_requests/duo_in_merge_requests.md#generate-a-merge-commit-message) | {{< icon name="dash-circle" >}} No | Not applicable |
| [Root Cause Analysis](../../user/gitlab_duo_chat/examples.md#troubleshoot-failed-cicd-jobs-with-root-cause-analysis) | {{< icon name="dash-circle" >}} No | Not applicable |
| [Vulnerability Explanation](../../user/application_security/vulnerabilities/_index.md#explaining-a-vulnerability) | {{< icon name="dash-circle" >}} No | Not applicable |
| [Vulnerability Resolution](../../user/application_security/vulnerabilities/_index.md#vulnerability-resolution) | {{< icon name="dash-circle" >}} No | Not applicable |
#### Supported Duo Chat features
@ -63,11 +77,7 @@ You can use the following GitLab Duo Chat features with GitLab Duo Self-Hosted:
- [Ask follow up questions](../../user/gitlab_duo_chat/examples.md#ask-follow-up-questions)
- [Ask about errors](../../user/gitlab_duo_chat/examples.md#ask-about-errors)
- [Ask about specific files](../../user/gitlab_duo_chat/examples.md#ask-about-specific-files)
- [Refactor code in the IDE](../../user/gitlab_duo_chat/examples.md#refactor-code-in-the-ide)
- [Fix code in the IDE](../../user/gitlab_duo_chat/examples.md#fix-code-in-the-ide)
- [Write tests in the IDE](../../user/gitlab_duo_chat/examples.md#write-tests-in-the-ide)
- [Ask about CI/CD](../../user/gitlab_duo_chat/examples.md#ask-about-cicd)
- [Use IDE slash commands](../../user/gitlab_duo_chat/examples.md#ide)
### Prerequisites

View File

@ -40,7 +40,7 @@ Support for the following GitLab-supported large language models (LLMs) is gener
| Model Family | Model | Supported Platforms | Code completion | Code generation | GitLab Duo Chat |
|-------------|-------|---------------------|-----------------|-----------------|-----------------|
| Mistral Codestral | [Codestral 22B v0.1](https://huggingface.co/mistralai/Codestral-22B-v0.1) | [vLLM](supported_llm_serving_platforms.md#for-self-hosted-model-deployments) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | N/A |
| Mistral Codestral | [Codestral 22B v0.1](https://huggingface.co/mistralai/Codestral-22B-v0.1) | [vLLM](supported_llm_serving_platforms.md#for-self-hosted-model-deployments) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | Not applicable |
| Mistral | [Mistral 7B-it v0.3](https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.3) | [vLLM](supported_llm_serving_platforms.md#for-self-hosted-model-deployments) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="dash-circle" >}} Not compatible |
| Mistral | [Mixtral 8x7B-it v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1) | [vLLM](supported_llm_serving_platforms.md#for-self-hosted-model-deployments), [AWS Bedrock](https://aws.amazon.com/bedrock/mistral/) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-dashed" >}} Largely compatible |
| Mistral | [Mixtral 8x22B-it v0.1](https://huggingface.co/mistralai/Mixtral-8x22B-Instruct-v0.1) | [vLLM](supported_llm_serving_platforms.md#for-self-hosted-model-deployments) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-dashed" >}} Largely compatible |
@ -48,6 +48,10 @@ Support for the following GitLab-supported large language models (LLMs) is gener
| GPT | [GPT-4 Turbo](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models?tabs=python-secure#gpt-4) | [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-dashed" >}} Largely compatible |
| GPT | [GPT-4o](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models?tabs=python-secure#gpt-4o-and-gpt-4-turbo) | [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible |
| GPT | [GPT-4o-mini](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models?tabs=python-secure#gpt-4o-and-gpt-4-turbo) | [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-dashed" >}} Largely compatible |
| Llama | [Llama 3 8B](https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct) | [AWS Bedrock](https://aws.amazon.com/bedrock/llama/) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | Not applicable |
| Llama | [Llama 3.1 8B](https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct) | [AWS Bedrock](https://aws.amazon.com/bedrock/llama/) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | Not applicable |
| Llama | [Llama 3 70B](https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct) | [AWS Bedrock](https://aws.amazon.com/bedrock/llama/) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | Not applicable |
| Llama | [Llama 3.1 70B](https://huggingface.co/meta-llama/Llama-3.1-70B-Instruct) | [AWS Bedrock](https://aws.amazon.com/bedrock/llama/) | {{< icon name="check-circle-filled" >}} Fully compatible | {{< icon name="check-circle-filled" >}} Fully compatible | Not applicable |
### Experimental and beta models

View File

@ -21834,6 +21834,22 @@ CI/CD variables for a group.
| <a id="cigroupvariablevalue"></a>`value` | [`String`](#string) | Value of the variable. |
| <a id="cigroupvariablevariabletype"></a>`variableType` | [`CiVariableType`](#civariabletype) | Type of the variable. |
### `CiInputSpec`
Input for pipeline creation.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="ciinputspecdefault"></a>`default` | [`CiInputsValueInputType`](#ciinputsvalueinputtype) | Default value for the input, if provided. |
| <a id="ciinputspecdescription"></a>`description` | [`String`](#string) | Description of the input. |
| <a id="ciinputspecname"></a>`name` | [`String!`](#string) | Name of the input. |
| <a id="ciinputspecoptions"></a>`options` | [`CiInputsValueInputType`](#ciinputsvalueinputtype) | Possible values that the input can take, if provided. |
| <a id="ciinputspecregex"></a>`regex` | [`String`](#string) | Regular expression pattern that the input value must match, if provided. |
| <a id="ciinputspecrequired"></a>`required` | [`Boolean!`](#boolean) | Indicates whether the input is required. |
| <a id="ciinputspectype"></a>`type` | [`CiInputType!`](#ciinputtype) | Input data type. |
### `CiInstanceVariable`
CI/CD variables for a GitLab instance.
@ -33884,6 +33900,23 @@ Returns [`[CiConfigVariable!]`](#ciconfigvariable).
| ---- | ---- | ----------- |
| <a id="projectciconfigvariablesref"></a>`ref` | [`String!`](#string) | Ref. |
##### `Project.ciPipelineCreationInputs`
Inputs to create a pipeline.
{{< details >}}
**Introduced** in GitLab 17.10.
**Status**: Experiment.
{{< /details >}}
Returns [`[CiInputSpec!]`](#ciinputspec).
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="projectcipipelinecreationinputsref"></a>`ref` | [`String!`](#string) | Ref where to create the pipeline. |
##### `Project.ciPipelineCreationRequest`
Get information about an asynchronous pipeline creation request.
@ -40852,6 +40885,17 @@ Values for sorting inherited variables.
| <a id="cigroupvariablessortkey_asc"></a>`KEY_ASC` | Key by ascending order. |
| <a id="cigroupvariablessortkey_desc"></a>`KEY_DESC` | Key by descending order. |
### `CiInputType`
Available input types.
| Value | Description |
| ----- | ----------- |
| <a id="ciinputtypearray"></a>`ARRAY` | Array input. |
| <a id="ciinputtypeboolean"></a>`BOOLEAN` | Boolean input. |
| <a id="ciinputtypenumber"></a>`NUMBER` | Number input. |
| <a id="ciinputtypestring"></a>`STRING` | String input. |
### `CiJobFailureReason`
| Value | Description |

View File

@ -183,7 +183,7 @@ class MigrationName < Elastic::Migration
end
```
#### `Elastic::MigrationRemoveFieldsHelper`
#### `Search::Elastic::MigrationRemoveFieldsHelper`
Removes specified fields from an index.
@ -193,7 +193,7 @@ Checks in batches if any documents that match `document_type` have the fields sp
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationRemoveFieldsHelper
include ::Search::Elastic::MigrationRemoveFieldsHelper
batched!
throttle_delay 1.minute
@ -218,7 +218,7 @@ The default batch size is `10_000`. You can override this value by specifying `B
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationRemoveFieldsHelper
include ::Search::Elastic::MigrationRemoveFieldsHelper
batched!
BATCH_SIZE = 100

View File

@ -0,0 +1,237 @@
# frozen_string_literal: true

# Shared query-building functionality for the Elasticsearch and OpenSearch adapters.
module ActiveContext
  module Databases
    module Concerns
      # Converts ActiveContext::Query trees into the query DSL that is common to
      # Elasticsearch and OpenSearch.
      #
      # Node types handled here:
      # - :filter -> exact matches (term for scalars, terms for arrays)
      # - :prefix -> starts-with matches
      # - :and / :or -> bool queries (must / should)
      # - :limit -> size parameter
      # - :knn -> vector similarity search; the concrete translation differs per
      #   database, so the including class must implement it
      #
      # KNN handling notes:
      # - Basic KNN queries are placed at the root level under the 'knn' key
      # - When combining KNN with filters, the filters are included inside the KNN query under 'filter'
      # - OR conditions with KNN maintain the KNN at root level with other conditions under 'query'
      #
      # @example Basic filter query with term
      #   query = ActiveContext::Query.filter(status: 'active')
      #   processor = Processor.new
      #   processor.process(query)
      #   # => { query: { bool: { must: [{ term: { status: 'active' } }] } } }
      #
      # @example Filter query with terms
      #   query = ActiveContext::Query.filter(status: ['active', 'pending'])
      #   processor = Processor.new
      #   processor.process(query)
      #   # => { query: { bool: { must: [{ terms: { status: ['active', 'pending'] } }] } } }
      #
      # @example KNN with filter
      #   query = ActiveContext::Query.filter(status: 'active').knn(
      #     target: 'embedding',
      #     vector: [0.1, 0.2],
      #     limit: 5
      #   )
      #   processor = Processor.new
      #   processor.process(query)
      #   # => {
      #   #   knn: {
      #   #     field: 'embedding',
      #   #     query_vector: [0.1, 0.2],
      #   #     k: 5,
      #   #     num_candidates: 50,
      #   #     filter: { bool: { must: [{ term: { status: 'active' } }] } }
      #   #   }
      #   # }
      module ElasticProcessor
        include ActiveContext::Databases::Concerns::Processor

        # Entry point: translates one query node into query DSL.
        #
        # @param node [ActiveContext::Query] the query node to translate
        # @return [Hash] the query DSL
        # @raise [ArgumentError] when the node type is not supported
        def process(node)
          case node.type
          when :filter
            process_filter(node.value)
          when :prefix
            process_prefix(node.value)
          when :or
            process_or(node)
          when :and
            process_and(node.children)
          when :knn
            process_knn(node)
          when :limit
            process_limit(node)
          else
            raise ArgumentError, "Unsupported node type: #{node.type}"
          end
        end

        private

        # Translates filter conditions into term/terms clauses.
        #
        # @param conditions [Hash] field => value pairs; array values become terms clauses
        # @return [Hash] a bool query with term/terms clauses in the must array
        # @example Single value (term)
        #   process_filter(status: 'active')
        #   # => { query: { bool: { must: [{ term: { status: 'active' } }] } } }
        # @example Array value (terms)
        #   process_filter(status: ['active', 'pending'])
        #   # => { query: { bool: { must: [{ terms: { status: ['active', 'pending'] } }] } } }
        def process_filter(conditions)
          build_bool_query(:must) do |clauses|
            conditions.each_pair do |field, value|
              clauses << if value.is_a?(Array)
                           { terms: { field => value } }
                         else
                           { term: { field => value } }
                         end
            end
          end
        end

        # Translates prefix conditions into prefix clauses.
        #
        # @param conditions [Hash] field => prefix pairs
        # @return [Hash] a bool query with prefix clauses in the must array
        # @example
        #   process_prefix(name: 'test', path: 'foo/')
        #   # => { query: { bool: { must: [
        #   #   { prefix: { name: 'test' } },
        #   #   { prefix: { path: 'foo/' } }
        #   # ] } } }
        def process_prefix(conditions)
          build_bool_query(:must) do |clauses|
            conditions.each_pair { |field, value| clauses << { prefix: { field => value } } }
          end
        end

        # Dispatches OR queries, routing KNN-containing queries to the
        # database-specific handler.
        #
        # @param node [ActiveContext::Query] the OR query node
        # @return [Hash] the translated query
        # @see #process_simple_or
        # @see #process_or_with_knn
        def process_or(node)
          contains_knn?(node) ? process_or_with_knn(node) : process_simple_or(node.children)
        end

        # Translates OR conditions that contain no KNN child.
        #
        # @param children [Array<ActiveContext::Query>] the queries to OR together
        # @return [Hash] a bool query with should clauses and minimum_should_match: 1
        def process_simple_or(children)
          build_bool_query(:should, minimum_should_match: 1) do |clauses|
            children.each { |child| clauses << extract_query(process(child)) }
          end
        end

        # OR conditions that include a KNN query are database specific;
        # the including class must implement this.
        #
        # @raise [NotImplementedError] always, in this shared concern
        def process_or_with_knn(_)
          raise NotImplementedError
        end

        # Translates AND conditions.
        #
        # @param children [Array<ActiveContext::Query>] the queries to AND together
        # @return [Hash] a bool query with must clauses
        def process_and(children)
          build_bool_query(:must) do |clauses|
            children.each { |child| clauses << extract_query(process(child)) }
          end
        end

        # KNN translation is database specific; the including class must implement this.
        #
        # @raise [NotImplementedError] always, in this shared concern
        def process_knn(_)
          raise NotImplementedError
        end

        # Applies a result limit by merging a size parameter onto the child query.
        #
        # @param node [ActiveContext::Query] the limit query node
        # @return [Hash] the child query with size added
        # @example
        #   # With size 10:
        #   # => { query: {...}, size: 10 }
        def process_limit(node)
          process(node.children.first).merge(size: node.value)
        end

        # @param node [ActiveContext::Query] the query node to inspect
        # @return [Boolean] true when any direct child is a KNN query
        def contains_knn?(node)
          node.children.any? { |child| child.type == :knn }
        end

        # @param node [ActiveContext::Query] the query node to search
        # @return [ActiveContext::Query, nil] the KNN child, if present
        def find_knn_child(node)
          node.children.find { |child| child.type == :knn }
        end

        # Builds the OR conditions for every child except the given KNN node.
        #
        # @param node [ActiveContext::Query] the query node to process
        # @param knn_child [ActiveContext::Query] the KNN child to leave out
        # @return [Hash] a bool query with the remaining conditions, or {} when none remain
        def build_or_conditions(node, knn_child)
          remaining = node.children.reject { |child| child == knn_child }
          return {} if remaining.empty?

          build_bool_query(:should, minimum_should_match: 1) do |clauses|
            remaining.each { |child| clauses << extract_query(process(child)) }
          end
        end

        # Shared constructor for bool queries.
        #
        # @param type [:must, :should] the bool clause type
        # @param minimum_should_match [Integer, nil] optional minimum matches for should clauses
        # @yield [Array] the clause array to append to
        # @return [Hash] the constructed bool query wrapped under :query
        def build_bool_query(type, minimum_should_match: nil)
          bool = { type => [] }
          bool[:minimum_should_match] = minimum_should_match if minimum_should_match
          yield bool[type]
          { query: { bool: bool } }
        end

        # Unwraps the :query portion of a processed result.
        #
        # @param processed [Hash] a previously processed query
        # @return [Hash] the inner query
        def extract_query(processed)
          processed[:query]
        end
      end
    end
  end
end

View File

@ -3,52 +3,8 @@
module ActiveContext
module Databases
module Elasticsearch
# Transforms ActiveContext::Query objects into Elasticsearch query DSL format.
#
# This processor handles the conversion of various query types into their corresponding
# Elasticsearch query structures, including:
# - Term queries for exact matches (single values)
# - Terms queries for multiple value matches (array values)
# - Prefix queries for starts-with matches
# - Bool queries for AND/OR combinations
# - KNN queries for vector similarity search
#
# KNN queries are handled specially to ensure they work with Elasticsearch's requirements:
# - Basic KNN queries are placed at the root level under the 'knn' key
# - When combining KNN with filters, the filters are included inside the KNN query under 'filter'
# - OR conditions with KNN maintain the KNN at root level with other conditions under 'query'
#
# @example Basic filter query with term
# query = ActiveContext::Query.filter(status: 'active')
# processor = Processor.new
# processor.process(query)
# # => { query: { bool: { must: [{ term: { status: 'active' } }] } } }
#
# @example Filter query with terms
# query = ActiveContext::Query.filter(status: ['active', 'pending'])
# processor = Processor.new
# processor.process(query)
# # => { query: { bool: { must: [{ terms: { status: ['active', 'pending'] } }] } } }
#
# @example KNN with filter
# query = ActiveContext::Query.filter(status: 'active').knn(
# target: 'embedding',
# vector: [0.1, 0.2],
# limit: 5
# )
# processor = Processor.new
# processor.process(query)
# # => {
# # knn: {
# # field: 'embedding',
# # query_vector: [0.1, 0.2],
# # k: 5,
# # num_candidates: 50,
# # filter: { bool: { must: [{ term: { status: 'active' } }] } }
# # }
# # }
class Processor
include ActiveContext::Databases::Concerns::Processor
include Concerns::ElasticProcessor
# Transforms a query node into Elasticsearch query DSL
#
@ -60,129 +16,6 @@ module ActiveContext
new.process(node)
end
# Processes a query node and returns the corresponding Elasticsearch query
#
# @param node [ActiveContext::Query] The query node to process
# @return [Hash] The Elasticsearch query DSL
# @raise [ArgumentError] If the query type is not supported
def process(node)
case node.type
when :filter then process_filter(node.value)
when :prefix then process_prefix(node.value)
when :or then process_or(node)
when :and then process_and(node.children)
when :knn then process_knn(node)
when :limit then process_limit(node)
else
raise ArgumentError, "Unsupported node type: #{node.type}"
end
end
private
# Processes filter conditions into term or terms queries
#
# @param conditions [Hash] The filter conditions where keys are fields and values are the terms
# @return [Hash] A bool query with term/terms clauses in the must array
# @example Single value (term)
# process_filter(status: 'active')
# # => { query: { bool: { must: [{ term: { status: 'active' } }] } } }
# @example Array value (terms)
# process_filter(status: ['active', 'pending'])
# # => { query: { bool: { must: [{ terms: { status: ['active', 'pending'] } }] } } }
def process_filter(conditions)
build_bool_query(:must) do |queries|
conditions.each do |field, value|
queries << (value.is_a?(Array) ? { terms: { field => value } } : { term: { field => value } })
end
end
end
# Processes prefix conditions into prefix queries
#
# @param conditions [Hash] The prefix conditions where keys are fields and values are the prefixes
# @return [Hash] A bool query with prefix clauses in the must array
# @example
# process_prefix(name: 'test', path: 'foo/')
# # => { query: { bool: { must: [
# # { prefix: { name: 'test' } },
# # { prefix: { path: 'foo/' } }
# # ] } } }
def process_prefix(conditions)
build_bool_query(:must) do |queries|
conditions.each do |field, value|
queries << { prefix: { field => value } }
end
end
end
# Processes OR queries, with special handling for KNN
#
# @param node [ActiveContext::Query] The OR query node
# @return [Hash] Either:
# - A bool query with should clauses for simple OR conditions
# - A combined structure with KNN at root level and other conditions under 'query' for OR with KNN
# @see #process_simple_or
# @see #process_or_with_knn
def process_or(node)
if contains_knn?(node)
process_or_with_knn(node)
else
process_simple_or(node.children)
end
end
# Processes simple OR conditions (without KNN)
#
# @param children [Array<ActiveContext::Query>] The child queries to OR together
# @return [Hash] A bool query with should clauses and minimum_should_match: 1
# @example
# process_simple_or([filter_query, prefix_query])
# # => { query: { bool: {
# # should: [...],
# # minimum_should_match: 1
# # } } }
def process_simple_or(children)
build_bool_query(:should, minimum_should_match: 1) do |queries|
children.each do |child|
queries << extract_query(process(child))
end
end
end
# Processes OR conditions that include a KNN query
#
# @param node [ActiveContext::Query] The OR query node containing KNN
# @return [Hash] A combined structure with KNN at root level and other conditions under 'query'
# @example
# # For KNN OR filter:
# # => {
# # knn: { field: 'embedding', ... },
# # query: { bool: { should: [...], minimum_should_match: 1 } }
# # }
def process_or_with_knn(node)
knn_child = find_knn_child(node)
other_conditions = build_or_conditions(node, knn_child)
knn_params = extract_knn_params(knn_child)
other_conditions.empty? ? { knn: knn_params } : { knn: knn_params, query: extract_query(other_conditions) }
end
# Processes AND conditions
#
# @param children [Array<ActiveContext::Query>] The child queries to AND together
# @return [Hash] A bool query with must clauses
# @example
# process_and([filter_query, prefix_query])
# # => { query: { bool: { must: [...] } } }
def process_and(children)
build_bool_query(:must) do |queries|
children.each do |child|
queries << extract_query(process(child))
end
end
end
# Processes KNN query, combining with optional filter conditions
#
# @param node [ActiveContext::Query] The KNN query node
@ -206,69 +39,22 @@ module ActiveContext
{ knn: knn_params }
end
# Processes limit by adding size parameter
# Processes OR conditions that include a KNN query
#
# @param node [ActiveContext::Query] The limit query node
# @return [Hash] The query with size parameter added
# @param node [ActiveContext::Query] The OR query node containing KNN
# @return [Hash] A combined structure with KNN at root level and other conditions under 'query'
# @example
# # With size 10:
# # => { query: {...}, size: 10 }
def process_limit(node)
child_query = process(node.children.first)
child_query.merge(size: node.value)
end
# # For KNN OR filter:
# # => {
# # knn: { field: 'embedding', ... },
# # query: { bool: { should: [...], minimum_should_match: 1 } }
# # }
def process_or_with_knn(node)
knn_child = find_knn_child(node)
other_conditions = build_or_conditions(node, knn_child)
knn_params = extract_knn_params(knn_child)
# Checks if node contains a KNN query
#
# @param node [ActiveContext::Query] The query node to check
# @return [Boolean] true if any child is a KNN query
def contains_knn?(node)
node.children.any? { |child| child.type == :knn }
end
# Finds the KNN child in a query node
#
# @param node [ActiveContext::Query] The query node to search
# @return [ActiveContext::Query, nil] The KNN query node if found
def find_knn_child(node)
node.children.find { |child| child.type == :knn }
end
# Builds OR conditions excluding KNN query
#
# @param node [ActiveContext::Query] The query node to process
# @param knn_child [ActiveContext::Query] The KNN child to exclude
# @return [Hash] A bool query with the remaining conditions
def build_or_conditions(node, knn_child)
other_queries = node.children.reject { |child| child == knn_child }
return {} if other_queries.empty?
build_bool_query(:should, minimum_should_match: 1) do |queries|
other_queries.each { |child| queries << extract_query(process(child)) }
end
end
# Helper to build bool queries consistently
#
# @param type [:must, :should] The bool query type
# @param minimum_should_match [Integer, nil] Optional minimum matches for should clauses
# @yield [Array] Yields an array to add query clauses to
# @return [Hash] The constructed bool query
def build_bool_query(type, minimum_should_match: nil)
query = { bool: { type => [] } }
query[:bool][:minimum_should_match] = minimum_should_match if minimum_should_match
yield query[:bool][type]
{ query: query }
end
# Safely extracts query part from processed result
#
# @param processed [Hash] The processed query result
# @return [Hash] The query part
def extract_query(processed)
processed[:query]
other_conditions.empty? ? { knn: knn_params } : { knn: knn_params, query: extract_query(other_conditions) }
end
# Extracts KNN parameters from a node into the expected format

View File

@ -18,8 +18,11 @@ module ActiveContext
@options = options
end
def search(_query)
res = client.search
def search(collection:, query:)
raise ArgumentError, "Expected Query object, you used #{query.class}" unless query.is_a?(ActiveContext::Query)
es_query = Processor.transform(query)
res = client.search(index: collection, body: es_query)
QueryResult.new(res)
end

View File

@ -0,0 +1,101 @@
# frozen_string_literal: true

module ActiveContext
  module Databases
    module Opensearch
      # OpenSearch-specific query processor. The shared translation lives in
      # Concerns::ElasticProcessor; this class supplies the KNN handling that
      # differs between OpenSearch and Elasticsearch.
      class Processor
        include Concerns::ElasticProcessor

        # Transforms a query node into Opensearch query DSL
        #
        # @param node [ActiveContext::Query] the query node to transform
        # @return [Hash] the Opensearch query DSL
        # @example
        #   Processor.transform(ActiveContext::Query.filter(status: 'active'))
        def self.transform(node)
          new.process(node)
        end

        # Translates a KNN query, folding optional filter conditions into the
        # bool query alongside the knn clause.
        #
        # @param node [ActiveContext::Query] the KNN query node
        # @return [Hash] a bool query containing the knn clause, plus must
        #   clauses when the node carries a child filter
        # @example
        #   # Basic KNN:
        #   # => { knn: { field: 'embedding', ... } }
        #   # KNN with filter:
        #   # => {
        #   #   knn: {
        #   #     field: 'embedding',
        #   #     ...,
        #   #     filter: { bool: { must: [...] } }
        #   #   }
        #   # }
        def process_knn(node)
          result = build_bool_query(:should) do |clauses|
            clauses << { knn: extract_knn_params(node) }
          end

          first_child = node.children.any? ? node.children.first : nil
          if first_child
            filter = extract_query(process(first_child))
            result[:query][:bool][:must] = filter[:bool][:must]
          end

          result
        end

        # Translates an OR query that contains a KNN child by appending the knn
        # clause to the should conditions built from the remaining children.
        #
        # @param node [ActiveContext::Query] the OR query node containing KNN
        # @return [Hash] a bool query whose should clauses include the knn clause
        def process_or_with_knn(node)
          knn_node = find_knn_child(node)
          knn_clause = { knn: extract_knn_params(knn_node) }
          conditions = build_or_conditions(node, knn_node)

          if conditions.empty?
            build_bool_query(:should) { |clauses| clauses << knn_clause }
          else
            conditions[:query][:bool][:should] << knn_clause
            conditions
          end
        end

        # Reshapes a KNN node's value into OpenSearch's knn parameter format.
        #
        # @param node [ActiveContext::Query] the KNN query node
        # @return [Hash] the formatted KNN parameters
        # @example
        #   # => {
        #   #   'embedding': {
        #   #     vector: [0.1, 0.2],
        #   #     k: 5
        #   #   }
        #   # }
        def extract_knn_params(node)
          params = node.value

          {
            params[:target] => {
              k: params[:limit],
              vector: params[:vector]
            }
          }
        end
      end
    end
  end
end

View File

@ -8,6 +8,7 @@ RSpec.describe ActiveContext::Databases::Opensearch::Client do
describe '#search' do
let(:opensearch_client) { instance_double(OpenSearch::Client) }
let(:search_response) { { 'hits' => { 'total' => 5, 'hits' => [] } } }
let(:query) { ActiveContext::Query.filter(project_id: 1) }
before do
allow(client).to receive(:client).and_return(opensearch_client)
@ -16,11 +17,11 @@ RSpec.describe ActiveContext::Databases::Opensearch::Client do
it 'calls search on the Opensearch client' do
expect(opensearch_client).to receive(:search)
client.search('query')
client.search(collection: 'test', query: query)
end
it 'returns a QueryResult object' do
result = client.search('query')
result = client.search(collection: 'test', query: query)
expect(result).to be_a(ActiveContext::Databases::Opensearch::QueryResult)
end
end

View File

@ -0,0 +1,373 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ActiveContext::Databases::Opensearch::Processor do
it_behaves_like 'a query processor'
describe '#process' do
subject(:processor) { described_class.new }
let(:simple_filter) { ActiveContext::Query.filter(status: 'active') }
let(:simple_prefix) { ActiveContext::Query.prefix(name: 'test') }
let(:simple_knn) do
ActiveContext::Query.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
)
end
context 'with filter queries' do
it 'creates a term query for exact matches' do
query = ActiveContext::Query.filter(status: 'active', project_id: 123)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
must: [
{ term: { status: 'active' } },
{ term: { project_id: 123 } }
]
}
}
)
end
it 'creates a terms query for array values' do
query = ActiveContext::Query.filter(project_id: [1, 2, 3])
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
must: [
{ terms: { project_id: [1, 2, 3] } }
]
}
}
)
end
it 'handles mixed term and terms queries' do
query = ActiveContext::Query.filter(
status: 'active',
project_id: [1, 2, 3],
category: 'product'
)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
must: [
{ term: { status: 'active' } },
{ terms: { project_id: [1, 2, 3] } },
{ term: { category: 'product' } }
]
}
}
)
end
it 'combines multiple filter queries with array values in must clauses' do
filter1 = ActiveContext::Query.filter(status: %w[active pending])
filter2 = ActiveContext::Query.filter(category: 'product')
query = ActiveContext::Query.and(filter1, filter2)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
must: [
{ bool: { must: [{ terms: { status: %w[active pending] } }] } },
{ bool: { must: [{ term: { category: 'product' } }] } }
]
}
}
)
end
it 'combines multiple filter queries in must clauses' do
filter1 = ActiveContext::Query.filter(status: 'active')
filter2 = ActiveContext::Query.filter(category: 'product')
query = ActiveContext::Query.and(filter1, filter2)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
must: [
{ bool: { must: [{ term: { status: 'active' } }] } },
{ bool: { must: [{ term: { category: 'product' } }] } }
]
}
}
)
end
end
context 'with prefix queries' do
it 'creates a prefix query for starts-with matches' do
query = ActiveContext::Query.prefix(name: 'test', path: 'foo/')
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
must: [
{ prefix: { name: 'test' } },
{ prefix: { path: 'foo/' } }
]
}
}
)
end
end
context 'with OR queries' do
it 'creates a should query with minimum_should_match' do
query = ActiveContext::Query.or(simple_filter, simple_prefix)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ bool: { must: [{ term: { status: 'active' } }] } },
{ bool: { must: [{ prefix: { name: 'test' } }] } }
],
minimum_should_match: 1
}
}
)
end
it 'handles terms queries in OR conditions' do
filter1 = ActiveContext::Query.filter(project_id: [1, 2, 3])
filter2 = ActiveContext::Query.filter(status: 'active')
query = ActiveContext::Query.or(filter1, filter2)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ bool: { must: [{ terms: { project_id: [1, 2, 3] } }] } },
{ bool: { must: [{ term: { status: 'active' } }] } }
],
minimum_should_match: 1
}
}
)
end
context 'when containing KNN' do
it 'combines KNN with other conditions' do
query = ActiveContext::Query.or(simple_knn, simple_filter)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ bool: { must: [{ term: { status: 'active' } }] } },
{ knn: { 'embedding' => { k: 5, vector: [0.1, 0.2] } } }
],
minimum_should_match: 1
}
}
)
end
it 'returns only KNN query when no other conditions' do
query = ActiveContext::Query.or(simple_knn)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ knn: { 'embedding' => { k: 5, vector: [0.1, 0.2] } } }
]
}
}
)
end
end
end
context 'with AND queries' do
it 'creates a must query combining conditions' do
query = ActiveContext::Query.and(simple_filter, simple_prefix)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
must: [
{ bool: { must: [{ term: { status: 'active' } }] } },
{ bool: { must: [{ prefix: { name: 'test' } }] } }
]
}
}
)
end
end
context 'with KNN queries' do
it 'creates a basic KNN query' do
result = processor.process(simple_knn)
expect(result).to eq(
query: {
bool: {
should: [
{ knn: { 'embedding' => { k: 5, vector: [0.1, 0.2] } } }
]
}
}
)
end
it 'applies filters in the bool query' do
query = simple_filter.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ knn: { 'embedding' => { k: 5, vector: [0.1, 0.2] } } }
],
must: [
{ term: { status: 'active' } }
]
}
}
)
end
it 'handles terms filter' do
filter = ActiveContext::Query.filter(project_id: [1, 2, 3])
query = filter.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ knn: { 'embedding' => { k: 5, vector: [0.1, 0.2] } } }
],
must: [
{ terms: { project_id: [1, 2, 3] } }
]
}
}
)
end
it 'handles multiple filter conditions' do
filter1 = ActiveContext::Query.filter(status: 'active')
filter2 = ActiveContext::Query.filter(category: 'product')
base_query = ActiveContext::Query.and(filter1, filter2)
query = base_query.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ knn: { 'embedding' => { k: 5, vector: [0.1, 0.2] } } }
],
must: [
{ bool: { must: [{ term: { status: 'active' } }] } },
{ bool: { must: [{ term: { category: 'product' } }] } }
]
}
}
)
end
it 'properly handles KNN with both prefix and filter conditions' do
filter = ActiveContext::Query.filter(status: 'active')
prefix = ActiveContext::Query.prefix(name: 'test')
base_query = ActiveContext::Query.and(filter, prefix)
query = base_query.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ knn: { 'embedding' => { k: 5, vector: [0.1, 0.2] } } }
],
must: [
{ bool: { must: [{ term: { status: 'active' } }] } },
{ bool: { must: [{ prefix: { name: 'test' } }] } }
]
}
}
)
end
end
context 'with limit queries' do
it 'adds size parameter to the query' do
query = simple_filter.limit(10)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
must: [{ term: { status: 'active' } }]
}
},
size: 10
)
end
it 'adds size parameter if a KNN query is used' do
query = simple_knn.limit(10)
result = processor.process(query)
expect(result).to eq(
query: {
bool: {
should: [
{ knn: { 'embedding' => { k: 5, vector: [0.1, 0.2] } } }
]
}
},
size: 10
)
end
end
end
end

View File

@ -41,6 +41,10 @@ module Ci
param.nil? ? default : param
end
def type
self.class.type_name
end
# An input specification without a default value is required.
# For example:
# ```yaml
@ -60,6 +64,16 @@ module Ci
spec[:options]
end
def description
spec[:description]
end
def regex
return unless regex_provided?
spec[:regex]
end
private
def run_validations(value, default: false)
@ -82,11 +96,15 @@ module Ci
# Regex can be only be a StringInput and is validated accordingly.
def validate_regex(_value, _default)
return unless spec.key?(:regex)
return unless regex_provided?
error('RegEx validation can only be used with string inputs')
end
def regex_provided?
spec.key?(:regex)
end
def error(message)
@errors.push("`#{name}` input: #{message}")
end

View File

@ -23,22 +23,26 @@ module Ci
build_inputs!(specs.to_h)
end
def all_inputs
@inputs
end
def input_names
@inputs.map(&:name)
all_inputs.map(&:name)
end
def errors
@errors + @inputs.flat_map(&:errors)
@errors + all_inputs.flat_map(&:errors)
end
def validate_input_params!(params)
@inputs.each do |input|
all_inputs.each do |input|
input.validate_param!(params[input.name])
end
end
def to_params(params)
@inputs.inject({}) do |hash, input|
all_inputs.inject({}) do |hash, input|
hash.merge(input.name => input.actual_value(params[input.name]))
end
end

View File

@ -41,9 +41,9 @@ module Ci
override :validate_regex
def validate_regex(value, default)
return unless spec.key?(:regex) && value.is_a?(String)
return unless regex_provided? && value.is_a?(String)
safe_regex = ::Gitlab::UntrustedRegexp.new(spec[:regex])
safe_regex = ::Gitlab::UntrustedRegexp.new(regex)
return if safe_regex.match?(value)

View File

@ -9,7 +9,6 @@ module Gitlab
class Mapper
# Fetches file contents and verifies them
class Verifier < Base
# TODO: remove with https://gitlab.com/gitlab-org/gitlab/-/issues/520828
def skip_load_content!
tap { @skip_load_content = true }
end

View File

@ -45,7 +45,7 @@ module Gitlab
def validate_array_value_variables(variables)
variables.is_a?(Hash) &&
variables.keys.all?(&method(:validate_alphanumeric)) &&
variables.values.all?(&:present?) &&
variables.values.all? { |v| !v.nil? } &&
variables.values.flatten(1).all?(&method(:validate_alphanumeric))
end

View File

@ -43285,6 +43285,9 @@ msgstr ""
msgid "Pipeline|Variable"
msgstr ""
msgid "Pipeline|Variable type"
msgstr ""
msgid "Pipeline|Variables"
msgstr ""

View File

@ -212,7 +212,6 @@ spec/frontend/repository/components/table/index_spec.js
spec/frontend/repository/components/table/row_spec.js
spec/frontend/search/sidebar/components/checkbox_filter_spec.js
spec/frontend/search/topbar/components/app_spec.js
spec/frontend/sessions/new/components/email_verification_spec.js
spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
spec/frontend/set_status_modal/user_profile_set_status_wrapper_spec.js
spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js

View File

@ -10,9 +10,7 @@ RSpec.describe 'Work items list filters', :js, feature_category: :team_planning
let_it_be(:user2) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:sub_group) { create(:group, parent: group) }
let_it_be(:project) { create(:project, :public, group: group, developers: [user1, user2]) }
let_it_be(:sub_group_project) { create(:project, :public, group: sub_group, developers: [user1, user2]) }
let_it_be(:label1) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
@ -37,7 +35,7 @@ RSpec.describe 'Work items list filters', :js, feature_category: :team_planning
end
let_it_be(:task) do
create(:work_item, :task, project: sub_group_project,
create(:work_item, :task, project: project,
assignees: [user2],
author: user2,
confidential: true,
@ -137,15 +135,6 @@ RSpec.describe 'Work items list filters', :js, feature_category: :team_planning
end
end
describe 'group' do
it 'filters', :aggregate_failures do
select_tokens 'Group', sub_group.name, submit: true
expect(page).to have_css('.issue', count: 1)
expect(page).to have_link(task.title)
end
end
describe 'label' do
it 'filters', :aggregate_failures do
select_tokens 'Label', '=', label1.title, submit: true

View File

@ -1,8 +1,8 @@
import { GlForm, GlFormInput } from '@gitlab/ui';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert, VARIANT_SUCCESS } from '~/alert';
import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import EmailVerification from '~/sessions/new/components/email_verification.vue';
@ -324,7 +324,7 @@ describe('EmailVerification', () => {
findSecondaryEmailForm().vm.$emit('submit-email', secondaryEmail);
await nextTick();
await waitForPromises();
expect(findSecondaryEmailForm().exists()).toBe(false);
expect(wrapper.text()).toContain(secondaryEmail);
@ -340,7 +340,7 @@ describe('EmailVerification', () => {
findSecondaryEmailForm().vm.$emit('cancel');
await nextTick();
await waitForPromises();
expect(findSecondaryEmailForm().exists()).toBe(false);
expect(findCodeInput().element.value).toBe('');

View File

@ -327,6 +327,11 @@ describe('Create work item component', () => {
describe('Create work item', () => {
it('emits workItemCreated on successful mutation', async () => {
const workItem = { ...createWorkItemMutationResponse.data.workItemCreate.workItem };
// There is a mismatch between the mutation response and the expected workItem
// object across the CE and EE fixtures, so we remove the `promotedToEpicUrl`
// property from the expected workItem object.
delete workItem.promotedToEpicUrl;
createComponent();
await waitForPromises();
@ -337,7 +342,7 @@ describe('Create work item component', () => {
expect(wrapper.emitted('workItemCreated')).toEqual([
[
{
workItem: createWorkItemMutationResponse.data.workItemCreate.workItem,
workItem: expect.objectContaining(workItem),
numberOfDiscussionsResolved: '1',
},
],

View File

@ -8,6 +8,7 @@ import LockedBadge from '~/issuable/components/locked_badge.vue';
import WorkItemCreatedUpdated from '~/work_items/components/work_item_created_updated.vue';
import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
import WorkItemStateBadge from '~/work_items/components/work_item_state_badge.vue';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import { mockAssignees, workItemByIidResponseFactory } from '../mock_data';
@ -24,6 +25,7 @@ describe('WorkItemCreatedUpdated component', () => {
const findConfidentialityBadge = () => wrapper.findComponent(ConfidentialityBadge);
const findLockedBadge = () => wrapper.findComponent(LockedBadge);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findWorkItemStateBadge = () => wrapper.findComponent(WorkItemStateBadge);
const createComponent = async ({
workItemIid = '1',
@ -32,12 +34,16 @@ describe('WorkItemCreatedUpdated component', () => {
confidential = false,
discussionLocked = false,
updateInProgress = false,
movedToWorkItemUrl = null,
duplicatedToWorkItemUrl = null,
} = {}) => {
const workItemQueryResponse = workItemByIidResponseFactory({
author,
updatedAt,
confidential,
discussionLocked,
movedToWorkItemUrl,
duplicatedToWorkItemUrl,
});
successHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
@ -77,6 +83,29 @@ describe('WorkItemCreatedUpdated component', () => {
expect(successHandler).not.toHaveBeenCalled();
});
describe('WorkItemStateBadge props', () => {
it('passes URL props correctly when they exist', async () => {
// We'll never populate all of these attributes because
// a work item can only have one closed reason.
// For simplicity we're passing all of them to easily assert
// that the props are passed correctly.
//
// Leaves out promotedToEpicUrl because it's only available in
// the EE work items query, which is not used in FOSS_ONLY mode.
const workItemAttributes = {
movedToWorkItemUrl: 'http://example.com/moved',
duplicatedToWorkItemUrl: 'http://example.com/duplicated',
};
await createComponent(workItemAttributes);
const stateBadgeProps = findWorkItemStateBadge().props();
Object.entries(workItemAttributes).forEach(([prop, url]) => {
expect(stateBadgeProps[prop]).toBe(url);
});
});
});
it('shows work item type metadata with type and icon', async () => {
await createComponent();

View File

@ -13,6 +13,7 @@ import WorkItemAncestors from '~/work_items/components/work_item_ancestors/work_
import WorkItemDescription from '~/work_items/components/work_item_description.vue';
import WorkItemCreatedUpdated from '~/work_items/components/work_item_created_updated.vue';
import WorkItemAttributesWrapper from '~/work_items/components/work_item_attributes_wrapper.vue';
import WorkItemErrorTracking from '~/work_items/components/work_item_error_tracking.vue';
import WorkItemTree from '~/work_items/components/work_item_links/work_item_tree.vue';
import WorkItemRelationships from '~/work_items/components/work_item_relationships/work_item_relationships.vue';
import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
@ -116,6 +117,7 @@ describe('WorkItemDetail component', () => {
const findAncestors = () => wrapper.findComponent(WorkItemAncestors);
const findCloseButton = () => wrapper.findByTestId('work-item-close');
const findWorkItemType = () => wrapper.findByTestId('work-item-type');
const findErrorTrackingWidget = () => wrapper.findComponent(WorkItemErrorTracking);
const findHierarchyTree = () => wrapper.findComponent(WorkItemTree);
const findWorkItemRelationships = () => wrapper.findComponent(WorkItemRelationships);
const findNotesWidget = () => wrapper.findComponent(WorkItemNotes);
@ -277,10 +279,7 @@ describe('WorkItemDetail component', () => {
expect(workItemUpdatedSubscriptionHandler).toHaveBeenCalledWith({ id });
});
it('fetches allowed children types for current work item', async () => {
createComponent();
await waitForPromises();
it('fetches allowed children types for current work item', () => {
expect(allowedChildrenTypesHandler).toHaveBeenCalled();
});
@ -291,6 +290,13 @@ describe('WorkItemDetail component', () => {
expect(findHierarchyTree().props('parentMilestone')).toEqual(milestone);
});
it('renders error tracking widget', () => {
expect(findErrorTrackingWidget().props()).toEqual({
fullPath: 'group/project',
identifier: '1',
});
});
});
describe('close button', () => {

View File

@ -90,6 +90,8 @@ describe('CreateBranchMergeRequestModal', () => {
const firePrimaryEvent = () => findGlModal().vm.$emit('primary', { preventDefault: jest.fn() });
const findPrimaryButton = () => findGlModal().props('actionPrimary');
const findPrivateForksSelector = () => wrapper.findComponent(ProjectFormGroup);
const findSourceBranch = () => wrapper.find('[data-testid="source-name"]');
const findTargetBranch = () => wrapper.find('[data-testid="target-name"]');
describe('when hosted at the root', () => {
beforeEach(() => {
@ -134,6 +136,31 @@ describe('CreateBranchMergeRequestModal', () => {
);
});
it('calls the create branch with correct source and target branch', async () => {
createWrapper();
await waitForPromises();
jest.spyOn(axios, 'post');
mock
.onPost(
'/fullPath/-/branches?branch_name=suggested_branch_name&format=json&issue_iid=1&ref=defaultBranch',
)
.reply(200, { data: { url: 'http://test.com/branch' } });
findSourceBranch().vm.$emit('input', 'source');
findTargetBranch().vm.$emit('input', 'target');
firePrimaryEvent();
await waitForPromises();
expect(axios.post).toHaveBeenCalledWith(
`/fullPath/-/branches?branch_name=target&format=json&issue_iid=1&ref=source`,
{
confidential_issue_project_id: null,
},
);
});
it('shows a success toast message when branch is created', async () => {
createWrapper();
await waitForPromises();
@ -186,11 +213,14 @@ describe('CreateBranchMergeRequestModal', () => {
)
.reply(200, { data: { url: 'http://test.com/branch' } });
findSourceBranch().vm.$emit('input', 'source_mr');
findTargetBranch().vm.$emit('input', 'target_mr');
firePrimaryEvent();
await waitForPromises();
expect(axios.post).toHaveBeenCalledWith(
`/fullPath/-/branches?branch_name=suggested_branch_name&format=json&issue_iid=1&ref=master`,
`/fullPath/-/branches?branch_name=target_mr&format=json&issue_iid=1&ref=source_mr`,
{
confidential_issue_project_id: null,
},
@ -201,7 +231,7 @@ describe('CreateBranchMergeRequestModal', () => {
await nextTick();
expect(visitUrl).toHaveBeenCalledWith(
'/fullPath/-/merge_requests/new?merge_request%5Bissue_iid%5D=1&merge_request%5Bsource_branch%5D=suggested_branch_name&merge_request%5Btarget_branch%5D=master',
'/fullPath/-/merge_requests/new?merge_request%5Bissue_iid%5D=1&merge_request%5Bsource_branch%5D=target_mr&merge_request%5Btarget_branch%5D=source_mr',
);
});

View File

@ -0,0 +1,73 @@
import { shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import WorkItemErrorTracking from '~/work_items/components/work_item_error_tracking.vue';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/alert');
describe('WorkItemErrorTracking component', () => {
let axiosMock;
let wrapper;
const successResponse = {
error: {
stack_trace_entries: [{ id: 1 }, { id: 2 }],
},
};
const findStacktrace = () => wrapper.findComponent(Stacktrace);
const createComponent = () => {
wrapper = shallowMount(WorkItemErrorTracking, {
propsData: {
fullPath: 'group/project',
identifier: '12345',
},
});
};
beforeEach(() => {
axiosMock = new MockAdapter(axios);
});
afterEach(() => {
axiosMock.restore();
});
it('renders h2 heading', () => {
createComponent();
expect(wrapper.find('h2').text()).toBe('Stack trace');
});
it('makes call to stack trace endpoint', async () => {
createComponent();
await waitForPromises();
expect(axiosMock.history.get[0].url).toBe(
'/group/project/-/error_tracking/12345/stack_trace.json',
);
});
it('renders Stacktrace component when we get data', async () => {
axiosMock.onGet().reply(HTTP_STATUS_OK, successResponse);
createComponent();
await waitForPromises();
expect(findStacktrace().props('entries')).toEqual(
successResponse.error.stack_trace_entries.toReversed(),
);
});
it('renders alert when we fail to get data', async () => {
axiosMock.onGet().reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent();
await waitForPromises();
expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to load stacktrace.' });
});
});

View File

@ -1,20 +1,32 @@
import { GlBadge } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { GlBadge, GlLink, GlSprintf } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { STATE_OPEN, STATE_CLOSED } from '~/work_items/constants';
import WorkItemStateBadge from '~/work_items/components/work_item_state_badge.vue';
describe('WorkItemStateBadge', () => {
let wrapper;
const createComponent = ({ workItemState = STATE_OPEN, showIcon = true } = {}) => {
wrapper = shallowMount(WorkItemStateBadge, {
const createComponent = ({
workItemState = STATE_OPEN,
showIcon = true,
movedToWorkItemUrl = '',
duplicatedToWorkItemUrl = '',
promotedToEpicUrl = '',
} = {}) => {
wrapper = mount(WorkItemStateBadge, {
propsData: {
workItemState,
showIcon,
movedToWorkItemUrl,
duplicatedToWorkItemUrl,
promotedToEpicUrl,
},
});
};
const findStatusBadge = () => wrapper.findComponent(GlBadge);
const findGlSprintf = () => wrapper.findComponent(GlSprintf);
const findGlLink = () => wrapper.findComponent(GlLink);
it.each`
state | showIcon | icon | stateText | variant
@ -32,4 +44,26 @@ describe('WorkItemStateBadge', () => {
expect(findStatusBadge().text()).toBe(stateText);
},
);
describe('closed state with link', () => {
it.each`
attribute | url | expectedText
${'movedToWorkItemUrl'} | ${'http://example.com/moved'} | ${'Closed (moved)'}
${'duplicatedToWorkItemUrl'} | ${'http://example.com/duplicated'} | ${'Closed (duplicated)'}
${'promotedToEpicUrl'} | ${'http://example.com/epic'} | ${'Closed (promoted)'}
`(
'renders correct text and link when $attribute is present on work item',
({ attribute, url, expectedText }) => {
const props = {
workItemState: STATE_CLOSED,
[attribute]: url,
};
createComponent(props);
expect(findGlSprintf().exists()).toBe(true);
expect(wrapper.text()).toContain(expectedText);
expect(findGlLink().attributes('href')).toBe(url);
},
);
});
});

View File

@ -20,11 +20,20 @@ describe('WorkItemStickyHeader', () => {
canUpdate = true,
features = {},
parentId = null,
movedToWorkItemUrl = null,
duplicatedToWorkItemUrl = null,
promotedToEpicUrl = null,
} = {}) => {
wrapper = shallowMountExtended(WorkItemStickyHeader, {
propsData: {
workItem: workItemResponseFactory({ canUpdate, confidential, discussionLocked }).data
.workItem,
workItem: workItemResponseFactory({
canUpdate,
confidential,
discussionLocked,
movedToWorkItemUrl,
duplicatedToWorkItemUrl,
promotedToEpicUrl,
}).data.workItem,
fullPath: '/test',
isStickyHeaderShowing: true,
workItemNotificationsSubscribed: true,
@ -124,6 +133,27 @@ describe('WorkItemStickyHeader', () => {
);
});
describe('WorkItemStateBadge props', () => {
it('passes URL props correctly when they exist', async () => {
// We'll never populate all of these attributes because
// a work item can only have one closed reason.
// For simplicity we're passing all of them to easily assert
// that the props are passed correctly.
const workItemAttributes = {
movedToWorkItemUrl: 'http://example.com/moved',
duplicatedToWorkItemUrl: 'http://example.com/duplicated',
promotedToEpicUrl: 'http://example.com/epic',
};
await createComponent(workItemAttributes);
const stateBadgeProps = findWorkItemStateBadge().props();
Object.entries(workItemAttributes).forEach(([prop, url]) => {
expect(stateBadgeProps[prop]).toBe(url);
});
});
});
describe('confidential badge', () => {
describe('when not confidential', () => {
beforeEach(() => {

View File

@ -749,4 +749,47 @@ describeSkipVue3(skipReason, () => {
expect(findIssuableList().props('issuables')).toEqual([]);
});
});
describe('group filter', () => {
describe('filtering by group', () => {
it('query excludes descendants and excludes projects', async () => {
mountComponent();
await waitForPromises();
findIssuableList().vm.$emit('filter', [
{
type: TOKEN_TYPE_GROUP,
value: { data: 'path/to/another/group', operator: OPERATOR_IS },
},
]);
await nextTick();
expect(defaultQueryHandler).toHaveBeenCalledWith(
expect.objectContaining({
excludeProjects: true,
includeDescendants: false,
}),
);
});
});
describe('not filtering by group', () => {
it('query includes descendants and includes projects', async () => {
mountComponent();
await waitForPromises();
findIssuableList().vm.$emit('filter', [
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'homer', operator: OPERATOR_IS } },
]);
await nextTick();
expect(defaultQueryHandler).toHaveBeenCalledWith(
expect.objectContaining({
excludeProjects: false,
includeDescendants: true,
}),
);
});
});
});
});

View File

@ -183,6 +183,9 @@ export const workItemQueryResponse = {
iid: '1',
archived: false,
title: 'Test',
movedToWorkItemUrl: null,
duplicatedToWorkItemUrl: null,
promotedToEpicUrl: null,
state: 'OPEN',
description: 'description',
confidential: false,
@ -313,6 +316,9 @@ export const updateWorkItemMutationResponse = {
iid: '1',
archived: false,
title: 'Updated title',
movedToWorkItemUrl: null,
duplicatedToWorkItemUrl: null,
promotedToEpicUrl: null,
state: 'OPEN',
description: 'description',
confidential: false,
@ -452,6 +458,9 @@ export const convertWorkItemMutationResponse = {
iid: '1',
archived: false,
title: 'Updated title',
movedToWorkItemUrl: null,
duplicatedToWorkItemUrl: null,
promotedToEpicUrl: null,
state: 'OPEN',
description: 'description',
webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
@ -1438,6 +1447,7 @@ export const workItemResponseFactory = ({
discussionLocked = false,
canInviteMembers = false,
labelsWidgetPresent = true,
errorTrackingWidgetPresent = true,
hierarchyWidgetPresent = true,
linkedItemsWidgetPresent = true,
crmContactsWidgetPresent = true,
@ -1471,6 +1481,9 @@ export const workItemResponseFactory = ({
developmentWidgetPresent = false,
customFieldsWidgetPresent = true,
customFieldValues = null,
movedToWorkItemUrl = null,
duplicatedToWorkItemUrl = null,
promotedToEpicUrl = null,
} = {}) => ({
data: {
workItem: {
@ -1487,6 +1500,9 @@ export const workItemResponseFactory = ({
updatedAt,
closedAt: null,
author,
movedToWorkItemUrl,
duplicatedToWorkItemUrl,
promotedToEpicUrl,
project: {
id: 'gid://gitlab/Project/7',
__typename: 'Project',
@ -1682,6 +1698,13 @@ export const workItemResponseFactory = ({
},
}
: { type: 'MOCK TYPE' },
errorTrackingWidgetPresent
? {
__typename: 'WorkItemWidgetErrorTracking',
type: 'ERROR_TRACKING',
identifier: '1',
}
: { type: 'MOCK TYPE' },
hierarchyWidgetPresent
? {
__typename: 'WorkItemWidgetHierarchy',
@ -1850,6 +1873,9 @@ export const createWorkItemMutationResponse = {
archived: false,
title: 'Updated title',
state: 'OPEN',
movedToWorkItemUrl: null,
duplicatedToWorkItemUrl: null,
promotedToEpicUrl: null,
description: 'description',
confidential: false,
createdAt: '2022-08-03T12:41:54Z',
@ -1939,6 +1965,9 @@ export const workItemHierarchyNoUpdatePermissionResponse = {
iid: '1',
archived: false,
state: 'OPEN',
movedToWorkItemUrl: null,
duplicatedToWorkItemUrl: null,
promotedToEpicUrl: null,
workItemType: {
id: 'gid://gitlab/WorkItems::Type/6',
name: 'Issue',
@ -3146,6 +3175,9 @@ export const changeWorkItemParentMutationResponse = {
iid: '2',
archived: false,
state: 'OPEN',
movedToWorkItemUrl: null,
duplicatedToWorkItemUrl: null,
promotedToEpicUrl: null,
title: 'Foo',
confidential: false,
createdAt: '2022-08-03T12:41:54Z',
@ -5601,6 +5633,9 @@ export const createWorkItemQueryResponse = {
iid: NEW_WORK_ITEM_IID,
archived: false,
title: '',
movedToWorkItemUrl: null,
duplicatedToWorkItemUrl: null,
promotedToEpicUrl: null,
state: 'OPEN',
description: '',
confidential: false,

View File

@ -86,6 +86,7 @@ describe('Work items router', () => {
WorkItemCreateBranchMergeRequestModal: true,
WorkItemDevelopment: true,
WorkItemChangeTypeModal: true,
WorkItemErrorTracking: true,
},
});
};

View File

@ -47,7 +47,7 @@ RSpec.describe GitlabSchema.types['Project'], feature_category: :groups_and_proj
allows_multiple_merge_request_assignees allows_multiple_merge_request_reviewers is_forked
protectable_branches available_deploy_keys explore_catalog_path
container_protection_tag_rules allowed_custom_statuses
pages_force_https pages_use_unique_domain ci_pipeline_creation_request
pages_force_https pages_use_unique_domain ci_pipeline_creation_request ci_pipeline_creation_inputs
]
expect(described_class).to include_graphql_fields(*expected_fields)

View File

@ -57,6 +57,43 @@ RSpec.describe Ci::PipelineCreation::Inputs::SpecInputs, feature_category: :pipe
end
end
describe '#all_inputs' do
context 'when inputs exists' do
let(:specs) do
{
'string_param' => string_input_spec,
'number_param' => number_input_spec,
'boolean_param' => boolean_input_spec,
'array_param' => array_input_spec
}
end
it 'returns all inputs' do
inputs = described_class.new(specs).all_inputs
expect(inputs.map(&:name)).to eq(specs.keys)
expect(inputs[0]).to be_an_instance_of(Ci::PipelineCreation::Inputs::StringInput)
expect(inputs[0].default).to eq('test')
expect(inputs[1]).to be_an_instance_of(Ci::PipelineCreation::Inputs::NumberInput)
expect(inputs[1].default).to eq(42)
expect(inputs[2]).to be_an_instance_of(Ci::PipelineCreation::Inputs::BooleanInput)
expect(inputs[2].default).to be(true)
expect(inputs[3]).to be_an_instance_of(Ci::PipelineCreation::Inputs::ArrayInput)
expect(inputs[3].default).to eq(['item1'])
end
end
context 'when inputs do not exist' do
it 'returns empty array' do
expect(described_class.new(nil).all_inputs).to be_empty
expect(described_class.new({}).all_inputs).to be_empty
end
end
end
describe '#input_names' do
let(:specs) do
{

View File

@ -165,4 +165,86 @@ RSpec.describe Gitlab::Config::Entry::Validators, feature_category: :pipeline_co
end
end
end
describe described_class::VariablesValidator do
using RSpec::Parameterized::TableSyntax
context 'with array_values: false (default)' do
before do
klass.instance_eval do
validates :config, variables: true
end
allow(instance).to receive(:config).and_return(config)
end
where(:config, :valid_result) do
{ foo: 'bar' } | true
{ foo: '' } | true
{ foo: nil } | false
{ 'foo' => 'bar' } | true
{ 'foo' => '' } | true
{ foo: 'bar', baz: 'qux' } | true
{ foo: '', baz: '' } | true
{ 123 => 'bar' } | true
{ foo: 123 } | true
{ nil => 'bar' } | false
[] | false
'string' | false
end
with_them do
it 'validates the instance' do
expect(instance.valid?).to be(valid_result)
unless valid_result
expect(instance.errors.messages_for(:config)).to include(/should be a hash of key value pairs/)
end
end
end
end
context 'with array_values: true' do
before do
klass.instance_eval do
validates :config, variables: { array_values: true }
end
allow(instance).to receive(:config).and_return(config)
end
where(:config, :valid_result) do
{ foo: 'bar' } | true
{ foo: ['bar'] } | true
{ foo: '' } | true
{ foo: [''] } | true
{ foo: nil } | false
{ 'foo' => 'bar' } | true
{ 'foo' => ['bar'] } | true
{ 'foo' => '' } | true
{ 'foo' => [''] } | true
{ foo: 'bar', baz: 'qux' } | true
{ foo: ['bar'], baz: ['qux'] } | true
{ foo: '', baz: '' } | true
{ foo: [''], baz: [''] } | true
{ 123 => 'bar' } | true
{ foo: 123 } | true
{ foo: [123] } | true
{ nil => 'bar' } | false
[] | false
'string' | false
end
with_them do
it 'validates the instance' do
expect(instance.valid?).to be(valid_result)
unless valid_result
expect(instance.errors.messages_for(:config))
.to include(/should be a hash of key value pairs, value can be an array/)
end
end
end
end
end
end

View File

@ -59,6 +59,8 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
'ci_pipeline_chat_data.project_id',
'p_ci_pipeline_variables.project_id',
'ci_pipeline_messages.project_id',
# LFK already present on ci_pipeline_schedules and cascade delete all ci resources.
'ci_pipeline_schedule_variables.project_id',
'p_ci_job_annotations.project_id', # LFK already present on p_ci_builds and cascade delete all ci resources
'p_ci_pipelines_config.project_id', # LFK already present on p_ci_pipelines and cascade delete all ci resources
'dast_profiles_pipelines.project_id', # LFK already present on dast_profiles and will cascade delete

View File

@ -0,0 +1,244 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Query.project.ciPipelineCreationInputs', feature_category: :pipeline_composition do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:config_yaml_without_inputs) do
<<~YAML
job:
script: echo hello world
YAML
end
let_it_be(:config_yaml) do
<<~YAML
spec:
inputs:
mandatory_string_input:
mandatory_number_input:
type: number
mandatory_boolean_input:
type: boolean
description: 'Mandatory boolean input'
mandatory_array_input:
type: array
optional_string_input:
type: string
default: 'default-value'
optional_number_input:
type: number
default: 1
optional_boolean_input:
type: boolean
default: true
description: 'Optional boolean input'
optional_array_input:
type: array
default: [{ a: 1 }, { b: 2}]
string_input_with_options:
options: ['option1', 'option2', 'option3']
number_input_with_options:
type: number
options: [1, 2, 3]
string_input_with_regex:
regex: '[a-z]+'
---
job:
script: echo hello world
YAML
end
let(:query) do
<<~GQL
query {
project(fullPath: "#{project.full_path}") {
ciPipelineCreationInputs(ref: "#{ref}") {
name
type
description
required
default
options
regex
}
}
}
GQL
end
before_all do
project.repository.create_file(
project.creator,
'.gitlab-ci.yml',
config_yaml,
message: 'Add CI',
branch_name: 'master')
project.repository.create_file(
project.creator,
'.gitlab-ci.yml',
config_yaml_without_inputs,
message: 'Add CI',
branch_name: 'feature-no-inputs')
end
context 'when current user has access to the project' do
before_all do
project.add_developer(user)
end
context 'when inputs exist' do
let(:ref) { 'master' }
it 'returns the inputs' do
post_graphql(query, current_user: user)
expect(graphql_data['project']).to eq({
'ciPipelineCreationInputs' => [
{
'name' => 'mandatory_string_input',
'type' => 'STRING',
'description' => nil,
'required' => true,
'default' => nil,
'options' => nil,
'regex' => nil
},
{
'name' => 'mandatory_number_input',
'type' => 'NUMBER',
'description' => nil,
'required' => true,
'default' => nil,
'options' => nil,
'regex' => nil
},
{
'name' => 'mandatory_boolean_input',
'type' => 'BOOLEAN',
'description' => 'Mandatory boolean input',
'required' => true,
'default' => nil,
'options' => nil,
'regex' => nil
},
{
'name' => 'mandatory_array_input',
'type' => 'ARRAY',
'description' => nil,
'required' => true,
'default' => nil,
'options' => nil,
'regex' => nil
},
{
'name' => 'optional_string_input',
'type' => 'STRING',
'description' => nil,
'required' => false,
'default' => 'default-value',
'options' => nil,
'regex' => nil
},
{
'name' => 'optional_number_input',
'type' => 'NUMBER',
'description' => nil,
'required' => false,
'default' => 1,
'options' => nil,
'regex' => nil
},
{
'name' => 'optional_boolean_input',
'type' => 'BOOLEAN',
'description' => 'Optional boolean input',
'required' => false,
'default' => true,
'options' => nil,
'regex' => nil
},
{
'name' => 'optional_array_input',
'type' => 'ARRAY',
'description' => nil,
'required' => false,
'default' => [{ 'a' => 1 }, { 'b' => 2 }],
'options' => nil,
'regex' => nil
},
{
'name' => 'string_input_with_options',
'type' => 'STRING',
'description' => nil,
'required' => true,
'default' => nil,
'options' => %w[option1 option2 option3],
'regex' => nil
},
{
'name' => 'number_input_with_options',
'type' => 'NUMBER',
'description' => nil,
'required' => true,
'default' => nil,
'options' => [1, 2, 3],
'regex' => nil
},
{
'name' => 'string_input_with_regex',
'type' => 'STRING',
'description' => nil,
'required' => true,
'default' => nil,
'options' => nil,
'regex' => '[a-z]+'
}
]
})
end
end
context 'when input does not exist' do
let(:ref) { 'feature-no-inputs' }
it 'returns no inputs' do
post_graphql(query, current_user: user)
expect(graphql_data['project'])
.to eq({ 'ciPipelineCreationInputs' => [] })
end
end
context 'when ref is not found' do
let(:ref) { 'non-existent-ref' }
it 'returns an error' do
post_graphql(query, current_user: user)
expect(graphql_errors)
.to include(a_hash_including('message' => 'ref can only be an existing branch or tag'))
end
end
end
context 'when current user cannot access the project' do
let(:ref) { 'master' }
before_all do
project.add_guest(user)
end
it 'returns an error' do
post_graphql(query, current_user: user)
expect(graphql_data['project'])
.to eq('ciPipelineCreationInputs' => nil)
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Ci::PipelineCreation::FindCiConfigSpecService, feature_category: :pipeline_composition do
RSpec.describe Ci::PipelineCreation::FindPipelineInputsService, feature_category: :pipeline_composition do
let(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@ -33,10 +33,6 @@ RSpec.describe Ci::PipelineCreation::FindCiConfigSpecService, feature_category:
YAML
end
let(:expected_spec) do
{ inputs: { foo: { default: 'bar' } } }
end
shared_examples 'successful response without spec' do
let(:config_yaml) { config_yaml_without_inputs }
@ -44,7 +40,11 @@ RSpec.describe Ci::PipelineCreation::FindCiConfigSpecService, feature_category:
result = service.execute
expect(result).to be_success
expect(result.payload).to eq(spec: {})
spec_inputs = result.payload.fetch(:inputs)
expect(spec_inputs).to be_a(::Ci::PipelineCreation::Inputs::SpecInputs)
expect(spec_inputs.errors).to be_empty
expect(spec_inputs.all_inputs).to be_empty
end
end
@ -53,7 +53,15 @@ RSpec.describe Ci::PipelineCreation::FindCiConfigSpecService, feature_category:
result = service.execute
expect(result).to be_success
expect(result.payload).to eq(spec: expected_spec)
spec_inputs = result.payload.fetch(:inputs)
expect(spec_inputs).to be_a(::Ci::PipelineCreation::Inputs::SpecInputs)
expect(spec_inputs.errors).to be_empty
input = spec_inputs.all_inputs.first
expect(input.name).to eq(:foo)
expect(input).to be_a(::Ci::PipelineCreation::Inputs::StringInput)
expect(input.default).to eq('bar')
end
end
@ -138,18 +146,18 @@ RSpec.describe Ci::PipelineCreation::FindCiConfigSpecService, feature_category:
expect(result.message).to eq('invalid YAML config')
end
end
end
context 'when an error occurs during yaml loading' do
it 'returns error response' do
allow(::Gitlab::Ci::Config::Yaml)
.to receive(:load!)
.and_raise(::Gitlab::Ci::Config::Yaml::LoadError)
context 'when an error occurs during yaml loading' do
it 'returns error response' do
allow(::Gitlab::Ci::Config::Yaml)
.to receive(:load!)
.and_raise(::Gitlab::Ci::Config::Yaml::LoadError)
result = service.execute
result = service.execute
expect(result).to be_error
expect(result.message).to match(/YAML load error/)
end
expect(result).to be_error
expect(result.message).to match(/YAML load error/)
end
end
@ -179,11 +187,11 @@ RSpec.describe Ci::PipelineCreation::FindCiConfigSpecService, feature_category:
end
end
it 'returns success response with empty spec' do
it 'returns error response' do
result = service.execute
expect(result).to be_success
expect(result.payload).to eq({ spec: {} })
expect(result).to be_error
expect(result.message).to eq('inputs not supported for this CI config source')
end
end
end