Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-05-27 09:14:17 +00:00
parent bd603a4188
commit 4e2b9c93ae
60 changed files with 414 additions and 259 deletions

View File

@@ -4024,8 +4024,6 @@ Gitlab/BoundedContexts:
     - 'ee/lib/ee/event_filter.rb'
     - 'ee/lib/ee/feature.rb'
     - 'ee/lib/ee/feature/definition.rb'
-    - 'ee/lib/ee/legacy_model.rb'
-    - 'ee/lib/ee/model.rb'
     - 'ee/lib/ee/sidebars/admin/menus/admin_settings_menu.rb'
     - 'ee/lib/ee/sidebars/admin/menus/monitoring_menu.rb'
     - 'ee/lib/ee/sidebars/admin/panel.rb'

View File

@@ -66,7 +66,6 @@ Graphql/Descriptions:
     - 'ee/app/graphql/ee/types/branch_protections/base_access_level_type.rb'
     - 'ee/app/graphql/ee/types/branch_rules/branch_protection_type.rb'
     - 'ee/app/graphql/ee/types/issue_type.rb'
-    - 'ee/app/graphql/ee/types/merge_request_type.rb'
     - 'ee/app/graphql/ee/types/projects/branch_rule_type.rb'
     - 'ee/app/graphql/ee/types/user_merge_request_interaction_type.rb'
     - 'ee/app/graphql/resolvers/epics_resolver.rb'

View File

@@ -18,24 +18,3 @@ Layout/FirstArrayElementIndentation:
     - 'qa/qa/specs/features/browser_ui/5_package/package_registry/npm/npm_project_level_spec.rb'
     - 'qa/qa/specs/features/ee/api/9_data_stores/elasticsearch/index_tests/main_index/blob_index_spec.rb'
     - 'qa/qa/specs/features/ee/api/9_data_stores/elasticsearch/nightly_elasticsearch_test_spec.rb'
-    - 'spec/controllers/projects/pipelines_controller_spec.rb'
-    - 'spec/finders/groups/accepting_project_transfers_finder_spec.rb'
-    - 'spec/graphql/mutations/commits/create_spec.rb'
-    - 'spec/graphql/resolvers/environments/nested_environments_resolver_spec.rb'
-    - 'spec/graphql/types/commit_signatures/verification_status_enum_spec.rb'
-    - 'spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb'
-    - 'spec/lib/gitlab/background_migration/delete_orphaned_operational_vulnerabilities_spec.rb'
-    - 'spec/lib/gitlab/backtrace_cleaner_spec.rb'
-    - 'spec/lib/gitlab/ci/config/entry/id_token_spec.rb'
-    - 'spec/lib/gitlab/ci/secure_files/x509_name_spec.rb'
-    - 'spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb'
-    - 'spec/lib/gitlab/database/load_balancing/service_discovery/sampler_spec.rb'
-    - 'spec/lib/gitlab/diff/inline_diff_spec.rb'
-    - 'spec/lib/gitlab/github_import/parallel_scheduling_spec.rb'
-    - 'spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb'
-    - 'spec/lib/gitlab/usage_data/topology_spec.rb'
-    - 'spec/models/group_group_link_spec.rb'
-    - 'spec/models/project_group_link_spec.rb'
-    - 'spec/models/user_preference_spec.rb'
-    - 'spec/services/issues/referenced_merge_requests_service_spec.rb'
-    - 'spec/services/security/merge_reports_service_spec.rb'
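The files removed from this exclude list were re-indented in this commit to satisfy `Layout/FirstArrayElementIndentation`. A minimal sketch of the layout these specs were moved to (content borrowed from one of the updated files; indentation widths are illustrative, not a definitive statement of the cop's configuration):

# The first array element is indented one step from the line that opens the
# bracket, and the closing `])` returns to that line's column.
expect(json_response['errors']).to eq([
  'test job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post'
])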

View File

@@ -147,7 +147,6 @@ Layout/LineEndStringConcatenationIndentation:
     - 'ee/app/graphql/ee/resolvers/ci/all_jobs_resolver.rb'
     - 'ee/app/graphql/ee/types/deployment_type.rb'
     - 'ee/app/graphql/ee/types/group_type.rb'
-    - 'ee/app/graphql/ee/types/merge_request_type.rb'
     - 'ee/app/graphql/ee/types/namespace_type.rb'
     - 'ee/app/graphql/ee/types/permission_types/deployment.rb'
     - 'ee/app/graphql/ee/types/project_statistics_type.rb'

View File

@@ -117,7 +117,6 @@ RSpec/FeatureCategory:
     - 'ee/spec/graphql/ee/types/group_type_spec.rb'
     - 'ee/spec/graphql/ee/types/issuable_type_spec.rb'
     - 'ee/spec/graphql/ee/types/issue_sort_enum_spec.rb'
-    - 'ee/spec/graphql/ee/types/merge_request_type_spec.rb'
     - 'ee/spec/graphql/ee/types/milestone_type_spec.rb'
     - 'ee/spec/graphql/ee/types/mutation_type_spec.rb'
     - 'ee/spec/graphql/ee/types/notes/noteable_interface_spec.rb'

View File

@@ -216,8 +216,6 @@ Style/RedundantSelf:
     - 'ee/lib/ee/gitlab/auth/ldap/sync/users.rb'
     - 'ee/lib/ee/gitlab/background_migration/delete_invalid_epic_issues.rb'
     - 'ee/lib/ee/gitlab/database.rb'
-    - 'ee/lib/ee/legacy_model.rb'
-    - 'ee/lib/ee/model.rb'
     - 'ee/lib/elastic/instance_proxy_util.rb'
     - 'ee/lib/elastic/latest/commit_config.rb'
     - 'ee/lib/elastic/latest/issue_config.rb'

View File

@@ -124,7 +124,7 @@ export default {
       return extractDiscussions(this.design.discussions);
     },
     markdownPreviewPath() {
-      return `/${this.projectPath}/preview_markdown?target_type=Issue`;
+      return `/${this.projectPath}/-/preview_markdown?target_type=Issue`;
     },
     designVariables() {
       return {

View File

@@ -0,0 +1,19 @@
<script>
import { GlTab } from '@gitlab/ui';
import { s__ } from '~/locale';

export default {
  components: {
    GlTab,
  },
  i18n: {
    summaryTitle: s__('Environment|Summary'),
    treeView: s__('Environment|Tree view'),
  },
};
</script>
<template>
  <gl-tab :title="$options.i18n.summaryTitle">
    <p class="gl-mt-3 gl-text-secondary">{{ $options.i18n.treeView }}</p>
  </gl-tab>
</template>

View File

@@ -2,10 +2,12 @@
 import { GlTabs, GlDrawer } from '@gitlab/ui';
 import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
 import { getContentWrapperHeight } from '~/lib/utils/dom_utils';
+import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
 import { k8sResourceType } from '~/environments/graphql/resolvers/kubernetes/constants';
 import WorkloadDetails from '~/kubernetes_dashboard/components/workload_details.vue';
 import KubernetesPods from './kubernetes_pods.vue';
 import KubernetesServices from './kubernetes_services.vue';
+import KubernetesSummary from './kubernetes_summary.vue';

 const tabs = [k8sResourceType.k8sPods, k8sResourceType.k8sServices];
@@ -14,10 +16,11 @@ export default {
     GlTabs,
     KubernetesPods,
     KubernetesServices,
+    KubernetesSummary,
     GlDrawer,
     WorkloadDetails,
   },
+  mixins: [glFeatureFlagMixin()],
   props: {
     configuration: {
       required: true,
@@ -43,6 +46,9 @@ export default {
     drawerHeaderHeight() {
       return getContentWrapperHeight();
     },
+    renderTreeView() {
+      return this.glFeatures.k8sTreeView;
+    },
   },
   watch: {
     activeTabIndex(newValue) {
@@ -64,6 +70,8 @@ export default {
 <template>
   <div>
     <gl-tabs v-model="activeTabIndex">
+      <kubernetes-summary v-if="renderTreeView" />
       <kubernetes-pods
         :namespace="namespace"
         :configuration="configuration"

View File

@@ -38,7 +38,7 @@ export default {
   },
   computed: {
     showAiActions() {
-      return this.resourceGlobalId && this.glFeatures.summarizeNotes;
+      return this.resourceGlobalId && this.glFeatures.summarizeComments;
     },
   },
 };

View File

@@ -79,7 +79,7 @@ export default {
       axios
         .post(
-          `${gon.relative_url_root}/${this.projectPath}/preview_markdown`,
+          `${gon.relative_url_root}/${this.projectPath}/-/preview_markdown`,
           postBody,
           postOptions,
         )

View File

@@ -69,7 +69,7 @@ export const autocompleteDataSources = ({ fullPath, iid, isGroup = false }) => (
 export const markdownPreviewPath = ({ fullPath, iid, isGroup = false }) => {
   const domain = gon.relative_url_root || '';
   const basePath = isGroup ? `groups/${fullPath}` : fullPath;
-  return `${domain}/${basePath}/preview_markdown?target_type=WorkItem&target_id=${iid}`;
+  return `${domain}/${basePath}/-/preview_markdown?target_type=WorkItem&target_id=${iid}`;
 };

 export const isReference = (input) => {

View File

@@ -13,10 +13,13 @@ class Import::GitlabGroupsController < ApplicationController
       return redirect_to new_group_path(anchor: 'import-group-pane'), alert: s_('GroupImport|Unable to process group import file')
     end

-    group_data = group_params.except(:file).merge(
-      visibility_level: closest_allowed_visibility_level,
-      import_export_upload: ImportExportUpload.new(import_file: group_params[:file])
-    )
+    group_data = group_params
+      .except(:file)
+      .merge(
+        visibility_level: closest_allowed_visibility_level,
+        import_export_upload: ImportExportUpload.new(import_file: group_params[:file])
+      )
+      .with_defaults(organization_id: Current.organization_id)

     response = ::Groups::CreateService.new(current_user, group_data).execute
     group = response[:group]
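The refactored chain above ends in ActiveSupport's `Hash#with_defaults` (an alias of `reverse_merge`), so `organization_id` is only filled in when the submitted parameters do not already carry one. A minimal sketch of that behaviour, assuming ActiveSupport is loaded (the hash values are illustrative):

require 'active_support/core_ext/hash/reverse_merge'

# The default is applied only when the key is absent...
{ path: 'my-group' }.with_defaults(organization_id: 1)
# => { :path => "my-group", :organization_id => 1 }

# ...and an explicitly supplied value wins over the default.
{ path: 'my-group', organization_id: 42 }.with_defaults(organization_id: 1)
# => { :path => "my-group", :organization_id => 42 }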

View File

@@ -14,6 +14,10 @@ class Projects::EnvironmentsController < Projects::ApplicationController
     push_frontend_feature_flag(:environments_folder_new_look, project)
   end

+  before_action only: [:show] do
+    push_frontend_feature_flag(:k8s_tree_view, project)
+  end
+
   before_action :authorize_read_environment!
   before_action :authorize_create_environment!, only: [:new, :create]
   before_action :authorize_stop_environment!, only: [:stop]

View File

@ -0,0 +1,9 @@
---
name: k8s_tree_view
feature_issue_url: https://gitlab.com/groups/gitlab-org/-/epics/13963
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/154083
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463616
milestone: '17.1'
group: group::environments
type: wip
default_enabled: false
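A flag defined this way is typically gated in Ruby with `Feature.enabled?` and exposed to the frontend with `push_frontend_feature_flag`, as the environments controller change later in this commit does. A minimal sketch under those assumptions (the surrounding method and call site are illustrative, not part of this commit):

# Illustrative only: gate server-side behaviour on the flag for a project actor;
# the default comes from `default_enabled` in the YAML definition above.
if Feature.enabled?(:k8s_tree_view, project)
  # render the tree view
end

# Controllers expose the flag to the Vue app, which then reads it through
# glFeatureFlagMixin as `this.glFeatures.k8sTreeView`:
# push_frontend_feature_flag(:k8s_tree_view, project)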

View File

@@ -4,7 +4,16 @@ classes:
 - Analytics::CycleAnalytics::IssueStageEvent
 feature_categories:
 - value_stream_management
-description: Contains various Issue-related timestamps for aggregating Value Stream Analytics data.
+description: Contains various Issue-related timestamps for aggregating Value Stream
+  Analytics data.
 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68950
 milestone: '14.3'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
+allow_cross_joins:
+- gitlab_main_clusterwide
+allow_cross_transactions:
+- gitlab_main_clusterwide
+allow_cross_foreign_keys:
+- gitlab_main_clusterwide
+sharding_key:
+  group_id: namespaces

View File

@@ -4,7 +4,16 @@ classes:
 - Analytics::CycleAnalytics::MergeRequestStageEvent
 feature_categories:
 - value_stream_management
-description: Contains various MergeRequest-related timestamps for aggregating Value Stream Analytics data.
+description: Contains various MergeRequest-related timestamps for aggregating Value
+  Stream Analytics data.
 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68950
 milestone: '14.3'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
+allow_cross_joins:
+- gitlab_main_clusterwide
+allow_cross_transactions:
+- gitlab_main_clusterwide
+allow_cross_foreign_keys:
+- gitlab_main_clusterwide
+sharding_key:
+  group_id: namespaces

View File

@@ -7,4 +7,5 @@ feature_categories:
 description: Stores hashes of Value Stream Analytics stage configurations.
 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67259
 milestone: '14.2'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
+sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/462758

View File

@@ -8,3 +8,4 @@ description: Allows efficient batch deletion of data in object storage.
 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/9bf76fe03f8edf4f67023448161af27abb8fb521
 milestone: '13.5'
 gitlab_schema: gitlab_ci
+sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463245

View File

@@ -4,7 +4,9 @@ classes:
 - Ci::JobAnnotation
 feature_categories:
 - build_artifacts
-description: Stores user provided annotations for jobs. Currently storing extra information for a given job feed by API.
+description: Stores user provided annotations for jobs. Currently storing extra information
+  for a given job feed by API.
 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/117319
 milestone: '16.1'
 gitlab_schema: gitlab_ci
+sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463246

View File

@@ -0,0 +1,16 @@
# frozen_string_literal: true

class DropTmpIndexCisVulnerabilityReadsOnId < Gitlab::Database::Migration[2.2]
  INDEX_NAME = 'tmp_index_cis_vulnerability_reads_on_id'

  disable_ddl_transaction!
  milestone '17.1'

  def up
    remove_concurrent_index_by_name :vulnerability_reads, INDEX_NAME
  end

  def down
    add_concurrent_index :vulnerability_reads, :id, name: INDEX_NAME, where: 'report_type = 7'
  end
end

View File

@ -0,0 +1 @@
54013b78fabc7f31593f5ca677c624cbc266a07a48b4f24a7eef84b390b201ae

View File

@ -865,6 +865,22 @@ RETURN NEW;
END END
$$; $$;
CREATE FUNCTION trigger_8ac78f164b2d() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."namespace_id" IS NULL THEN
SELECT "namespace_id"
INTO NEW."namespace_id"
FROM "projects"
WHERE "projects"."id" = NEW."project_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_8e66b994e8f0() RETURNS trigger CREATE FUNCTION trigger_8e66b994e8f0() RETURNS trigger
LANGUAGE plpgsql LANGUAGE plpgsql
AS $$ AS $$
@ -897,22 +913,6 @@ RETURN NEW;
END END
$$; $$;
CREATE FUNCTION trigger_8ac78f164b2d() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."namespace_id" IS NULL THEN
SELECT "namespace_id"
INTO NEW."namespace_id"
FROM "projects"
WHERE "projects"."id" = NEW."project_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_94514aeadc50() RETURNS trigger CREATE FUNCTION trigger_94514aeadc50() RETURNS trigger
LANGUAGE plpgsql LANGUAGE plpgsql
AS $$ AS $$
@ -28481,8 +28481,6 @@ CREATE INDEX tmp_idx_orphaned_approval_project_rules ON approval_project_rules U
CREATE INDEX tmp_index_ci_job_artifacts_on_expire_at_where_locked_unknown ON ci_job_artifacts USING btree (expire_at, job_id) WHERE ((locked = 2) AND (expire_at IS NOT NULL)); CREATE INDEX tmp_index_ci_job_artifacts_on_expire_at_where_locked_unknown ON ci_job_artifacts USING btree (expire_at, job_id) WHERE ((locked = 2) AND (expire_at IS NOT NULL));
CREATE INDEX tmp_index_cis_vulnerability_reads_on_id ON vulnerability_reads USING btree (id) WHERE (report_type = 7);
CREATE INDEX tmp_index_for_null_member_namespace_id ON members USING btree (member_namespace_id) WHERE (member_namespace_id IS NULL); CREATE INDEX tmp_index_for_null_member_namespace_id ON members USING btree (member_namespace_id) WHERE (member_namespace_id IS NULL);
CREATE INDEX tmp_index_for_project_namespace_id_migration_on_routes ON routes USING btree (id) WHERE ((namespace_id IS NULL) AND ((source_type)::text = 'Project'::text)); CREATE INDEX tmp_index_for_project_namespace_id_migration_on_routes ON routes USING btree (id) WHERE ((namespace_id IS NULL) AND ((source_type)::text = 'Project'::text));
@ -30223,12 +30221,12 @@ CREATE TRIGGER trigger_56d49f4ed623 BEFORE INSERT OR UPDATE ON workspace_variabl
CREATE TRIGGER trigger_7a8b08eed782 BEFORE INSERT OR UPDATE ON boards_epic_board_positions FOR EACH ROW EXECUTE FUNCTION trigger_7a8b08eed782(); CREATE TRIGGER trigger_7a8b08eed782 BEFORE INSERT OR UPDATE ON boards_epic_board_positions FOR EACH ROW EXECUTE FUNCTION trigger_7a8b08eed782();
CREATE TRIGGER trigger_8ac78f164b2d BEFORE INSERT OR UPDATE ON design_management_repositories FOR EACH ROW EXECUTE FUNCTION trigger_8ac78f164b2d();
CREATE TRIGGER trigger_8e66b994e8f0 BEFORE INSERT OR UPDATE ON audit_events_streaming_event_type_filters FOR EACH ROW EXECUTE FUNCTION trigger_8e66b994e8f0(); CREATE TRIGGER trigger_8e66b994e8f0 BEFORE INSERT OR UPDATE ON audit_events_streaming_event_type_filters FOR EACH ROW EXECUTE FUNCTION trigger_8e66b994e8f0();
CREATE TRIGGER trigger_8fbb044c64ad BEFORE INSERT OR UPDATE ON design_management_designs FOR EACH ROW EXECUTE FUNCTION trigger_8fbb044c64ad(); CREATE TRIGGER trigger_8fbb044c64ad BEFORE INSERT OR UPDATE ON design_management_designs FOR EACH ROW EXECUTE FUNCTION trigger_8fbb044c64ad();
CREATE TRIGGER trigger_8ac78f164b2d BEFORE INSERT OR UPDATE ON design_management_repositories FOR EACH ROW EXECUTE FUNCTION trigger_8ac78f164b2d();
CREATE TRIGGER trigger_94514aeadc50 BEFORE INSERT OR UPDATE ON deployment_approvals FOR EACH ROW EXECUTE FUNCTION trigger_94514aeadc50(); CREATE TRIGGER trigger_94514aeadc50 BEFORE INSERT OR UPDATE ON deployment_approvals FOR EACH ROW EXECUTE FUNCTION trigger_94514aeadc50();
CREATE TRIGGER trigger_b4520c29ea74 BEFORE INSERT OR UPDATE ON approval_merge_request_rule_sources FOR EACH ROW EXECUTE FUNCTION trigger_b4520c29ea74(); CREATE TRIGGER trigger_b4520c29ea74 BEFORE INSERT OR UPDATE ON approval_merge_request_rule_sources FOR EACH ROW EXECUTE FUNCTION trigger_b4520c29ea74();

View File

@@ -23841,7 +23841,7 @@ Defines which user roles, users, or groups can merge into a protected branch.
 | <a id="mergerequestallowcollaboration"></a>`allowCollaboration` | [`Boolean`](#boolean) | Indicates if members of the target project can push to the fork. |
 | <a id="mergerequestallowsmultipleassignees"></a>`allowsMultipleAssignees` | [`Boolean!`](#boolean) | Allows assigning multiple users to a merge request. |
 | <a id="mergerequestallowsmultiplereviewers"></a>`allowsMultipleReviewers` | [`Boolean!`](#boolean) | Allows assigning multiple reviewers to a merge request. |
-| <a id="mergerequestapprovalstate"></a>`approvalState` | [`MergeRequestApprovalState!`](#mergerequestapprovalstate) | Information relating to rules that must be satisfied to merge this merge request. |
+| <a id="mergerequestapprovalstate"></a>`approvalState` | [`MergeRequestApprovalState!`](#mergerequestapprovalstate) | Information relating to rules that must be satisfied to merge the merge request. |
 | <a id="mergerequestapprovalsleft"></a>`approvalsLeft` | [`Int`](#int) | Number of approvals left. |
 | <a id="mergerequestapprovalsrequired"></a>`approvalsRequired` | [`Int`](#int) | Number of approvals required. |
 | <a id="mergerequestapproved"></a>`approved` | [`Boolean!`](#boolean) | Indicates if the merge request has all the required approvals. |
@@ -23890,6 +23890,7 @@ Defines which user roles, users, or groups can merge into a protected branch.
 | <a id="mergerequestmergerequestdiffs"></a>`mergeRequestDiffs` **{warning-solid}** | [`MergeRequestDiffConnection`](#mergerequestdiffconnection) | **Introduced** in GitLab 16.2. **Status**: Experiment. Diff versions of a merge request. |
 | <a id="mergerequestmergestatus"></a>`mergeStatus` **{warning-solid}** | [`String`](#string) | **Deprecated** in GitLab 14.0. This was renamed. Use: [`MergeRequest.mergeStatusEnum`](#mergerequestmergestatusenum). |
 | <a id="mergerequestmergestatusenum"></a>`mergeStatusEnum` | [`MergeStatus`](#mergestatus) | Merge status of the merge request. |
+| <a id="mergerequestmergetrainindex"></a>`mergeTrainIndex` | [`Int`](#int) | Zero-based position of the merge request in the merge train. Returns `null` if the merge request is not in a merge train. |
 | <a id="mergerequestmergetrainscount"></a>`mergeTrainsCount` | [`Int`](#int) | Number of merge requests in the merge train. |
 | <a id="mergerequestmergeuser"></a>`mergeUser` | [`UserCore`](#usercore) | User who merged this merge request or set it to auto-merge. |
 | <a id="mergerequestmergewhenpipelinesucceeds"></a>`mergeWhenPipelineSucceeds` | [`Boolean`](#boolean) | Indicates if the merge has been set to auto-merge. |

View File

@@ -224,6 +224,7 @@ To add a new audit event type:
 | `milestone` | yes | Milestone in which this type was added |
 | `saved_to_database` | yes | Indicate whether to persist events to database and JSON logs |
 | `streamed` | yes | Indicate that events should be streamed to external services (if configured) |
+| `scope` | yes | List of scopes that this audit event type is available for. Should be an Array containing one or more of `Project`, `User`, `Group` or `Instance` |

 ### Generate documentation

View File

@@ -651,10 +651,10 @@ end
 ### Code in `app/models/`

-EE-specific models should `extend EE::Model`.
-For example, if EE has a specific `Tanuki` model, you would
-place it in `ee/app/models/ee/tanuki.rb`.
+EE-specific models should be defined in `ee/app/models/`.
+To override a CE model create the file in
+`ee/app/models/ee/` and add new code to a `prepended` block.

 ActiveRecord `enums` should be entirely
 [defined in FOSS](database/creating_enums.md#all-of-the-keyvalue-pairs-should-be-defined-in-foss).
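A minimal sketch of the `prepended` pattern the new wording describes, reusing the hypothetical `Tanuki` model from the previous text (all names and members are illustrative, not from the codebase):

# ee/app/models/ee/tanuki.rb
module EE
  module Tanuki
    extend ActiveSupport::Concern

    prepended do
      # EE-only scopes, validations, and associations go here so they only
      # take effect when the EE module is prepended into the CE model.
      scope :audited, -> { where(audited: true) }
    end

    # Instance methods here override or extend the CE implementation.
    def description
      "#{super} (EE)"
    end
  end
end

# The CE model opts in at the bottom of app/models/tanuki.rb:
#   Tanuki.prepend_mod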

View File

@@ -65,16 +65,13 @@ module API
         requires :name, type: String, desc: 'Group name'
         requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: 'The group export file to be imported', documentation: { type: 'file' }
         optional :parent_id, type: Integer, desc: "The ID of the parent group that the group will be imported into. Defaults to the current user's namespace."
-        optional :organization_id, type: Integer, desc: "The ID of the organization that the group will be part of. "
+        optional :organization_id, type: Integer, default: -> { Current.organization_id }, desc: "The ID of the organization that the group will be part of. "
       end
       post 'import' do
         authorize_create_group!
         require_gitlab_workhorse!
         validate_file!

-        # Only set `organization_id` if it isn't already present and can't be inferred from the `parent_id`.
-        params[:organization_id] ||= Current.organization_id if params[:parent_id].blank?
-
         group_params = {
           path: params[:path],
           name: params[:name],

View File

@ -20462,6 +20462,9 @@ msgstr ""
msgid "Environment|Stalled" msgid "Environment|Stalled"
msgstr "" msgstr ""
msgid "Environment|Summary"
msgstr ""
msgid "Environment|There are no Kubernetes cluster connections configured for this environment. Connect a cluster to add the status of your workloads, resources, and the Flux reconciliation state to the dashboard. %{linkStart}Learn more about Kubernetes integration.%{linkEnd}" msgid "Environment|There are no Kubernetes cluster connections configured for this environment. Connect a cluster to add the status of your workloads, resources, and the Flux reconciliation state to the dashboard. %{linkStart}Learn more about Kubernetes integration.%{linkEnd}"
msgstr "" msgstr ""
@ -20471,6 +20474,9 @@ msgstr ""
msgid "Environment|There was an error fetching %{resourceType}." msgid "Environment|There was an error fetching %{resourceType}."
msgstr "" msgstr ""
msgid "Environment|Tree view"
msgstr ""
msgid "Environment|Unauthorized to access %{resourceType} from this environment." msgid "Environment|Unauthorized to access %{resourceType} from this environment."
msgstr "" msgstr ""
@ -32042,6 +32048,15 @@ msgstr ""
msgid "Members|Remove group" msgid "Members|Remove group"
msgstr "" msgstr ""
msgid "Members|Requested By"
msgstr ""
msgid "Members|Requested On"
msgstr ""
msgid "Members|Requested Role"
msgstr ""
msgid "Members|Revert to LDAP synced settings" msgid "Members|Revert to LDAP synced settings"
msgstr "" msgstr ""
@ -61309,6 +61324,9 @@ msgstr ""
msgid "ciReport|Container Scanning" msgid "ciReport|Container Scanning"
msgstr "" msgstr ""
msgid "ciReport|Container Scanning for Registry"
msgstr ""
msgid "ciReport|Container scanning" msgid "ciReport|Container scanning"
msgstr "" msgstr ""

View File

@@ -24,7 +24,7 @@ gem 'rainbow', '~> 3.1.1'
 gem 'rspec-parameterized', '~> 1.0.2'
 gem 'octokit', '~> 8.1.0'
 gem "faraday-retry", "~> 2.2", ">= 2.2.1"
-gem 'zeitwerk', '~> 2.6', '>= 2.6.14'
+gem 'zeitwerk', '~> 2.6', '>= 2.6.15'
 gem 'influxdb-client', '~> 3.1'
 gem 'terminal-table', '~> 3.0.2', require: false
 gem 'slack-notifier', '~> 2.4', require: false

View File

@@ -356,7 +356,7 @@ GEM
     websocket (1.2.10)
     xpath (3.2.0)
       nokogiri (~> 1.8)
-    zeitwerk (2.6.14)
+    zeitwerk (2.6.15)

 PLATFORMS
   ruby
@@ -399,7 +399,7 @@ DEPENDENCIES
   slack-notifier (~> 2.4)
   terminal-table (~> 3.0.2)
   warning (~> 1.3)
-  zeitwerk (~> 2.6, >= 2.6.14)
+  zeitwerk (~> 2.6, >= 2.6.15)

 BUNDLED WITH
    2.5.10

View File

@ -941,8 +941,8 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['errors']).to eq([ expect(json_response['errors']).to eq([
'test job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post' 'test job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post'
]) ])
expect(json_response['warnings'][0]).to include( expect(json_response['warnings'][0]).to include(
'jobs:build may allow multiple pipelines to run for a single action due to `rules:when`' 'jobs:build may allow multiple pipelines to run for a single action due to `rules:when`'
) )

View File

@@ -169,7 +169,7 @@ describe('Design management design index page', () => {
   it('passes correct props to sidebar component', () => {
     expect(findSidebar().props()).toEqual({
       design: expect.any(Object),
-      markdownPreviewPath: '/project-path/preview_markdown?target_type=Issue',
+      markdownPreviewPath: '/project-path/-/preview_markdown?target_type=Issue',
       resolvedDiscussionsExpanded: false,
       isLoading: false,
       isOpen: true,

View File

@ -0,0 +1,29 @@
import { shallowMount } from '@vue/test-utils';
import { GlTab } from '@gitlab/ui';
import KubernetesSummary from '~/environments/environment_details/components/kubernetes/kubernetes_summary.vue';
describe('~/environments/environment_details/components/kubernetes/kubernetes_summary.vue', () => {
let wrapper;
const findTab = () => wrapper.findComponent(GlTab);
const createWrapper = () => {
wrapper = shallowMount(KubernetesSummary, {
stubs: { GlTab },
});
};
describe('mounted', () => {
beforeEach(() => {
createWrapper();
});
it('renders summary tab', () => {
expect(findTab().attributes('title')).toBe('Summary');
});
it('renders tree view title', () => {
expect(findTab().text()).toBe('Tree view');
});
});
});

View File

@ -4,6 +4,7 @@ import { GlTabs, GlDrawer } from '@gitlab/ui';
import KubernetesTabs from '~/environments/environment_details/components/kubernetes/kubernetes_tabs.vue'; import KubernetesTabs from '~/environments/environment_details/components/kubernetes/kubernetes_tabs.vue';
import KubernetesPods from '~/environments/environment_details/components/kubernetes/kubernetes_pods.vue'; import KubernetesPods from '~/environments/environment_details/components/kubernetes/kubernetes_pods.vue';
import KubernetesServices from '~/environments/environment_details/components/kubernetes/kubernetes_services.vue'; import KubernetesServices from '~/environments/environment_details/components/kubernetes/kubernetes_services.vue';
import KubernetesSummary from '~/environments/environment_details/components/kubernetes/kubernetes_summary.vue';
import WorkloadDetails from '~/kubernetes_dashboard/components/workload_details.vue'; import WorkloadDetails from '~/kubernetes_dashboard/components/workload_details.vue';
import { k8sResourceType } from '~/environments/graphql/resolvers/kubernetes/constants'; import { k8sResourceType } from '~/environments/graphql/resolvers/kubernetes/constants';
import { mockKasTunnelUrl } from 'jest/environments/mock_data'; import { mockKasTunnelUrl } from 'jest/environments/mock_data';
@ -22,31 +23,49 @@ describe('~/environments/environment_details/components/kubernetes/kubernetes_ta
const findTabs = () => wrapper.findComponent(GlTabs); const findTabs = () => wrapper.findComponent(GlTabs);
const findKubernetesPods = () => wrapper.findComponent(KubernetesPods); const findKubernetesPods = () => wrapper.findComponent(KubernetesPods);
const findKubernetesServices = () => wrapper.findComponent(KubernetesServices); const findKubernetesServices = () => wrapper.findComponent(KubernetesServices);
const findKubernetesSummary = () => wrapper.findComponent(KubernetesSummary);
const findDrawer = () => wrapper.findComponent(GlDrawer); const findDrawer = () => wrapper.findComponent(GlDrawer);
const findWorkloadDetails = () => wrapper.findComponent(WorkloadDetails); const findWorkloadDetails = () => wrapper.findComponent(WorkloadDetails);
const createWrapper = (activeTab = k8sResourceType.k8sPods) => { const createWrapper = ({
activeTab = k8sResourceType.k8sPods,
k8sTreeViewEnabled = false,
} = {}) => {
wrapper = shallowMount(KubernetesTabs, { wrapper = shallowMount(KubernetesTabs, {
provide: {
glFeatures: { k8sTreeView: k8sTreeViewEnabled },
},
propsData: { configuration, namespace, value: activeTab }, propsData: { configuration, namespace, value: activeTab },
stubs: { GlDrawer }, stubs: { GlDrawer },
}); });
}; };
describe('mounted', () => { describe('mounted', () => {
beforeEach(() => { describe('when `k8sTreeView feature flag is disabled', () => {
createWrapper(); beforeEach(() => {
createWrapper();
});
it('shows tabs', () => {
expect(findTabs().exists()).toBe(true);
});
it('renders pods tab', () => {
expect(findKubernetesPods().props()).toEqual({ namespace, configuration });
});
it('renders services tab', () => {
expect(findKubernetesServices().props()).toEqual({ namespace, configuration });
});
it("doesn't render summary tab", () => {
expect(findKubernetesSummary().exists()).toBe(false);
});
}); });
it('shows tabs', () => { it('renders summary tab if the feature flag is enabled', () => {
expect(findTabs().exists()).toBe(true); createWrapper({ k8sTreeViewEnabled: true });
}); expect(findKubernetesSummary().exists()).toBe(true);
it('renders pods tab', () => {
expect(findKubernetesPods().props()).toEqual({ namespace, configuration });
});
it('renders services tab', () => {
expect(findKubernetesServices().props()).toEqual({ namespace, configuration });
}); });
}); });
@ -57,7 +76,7 @@ describe('~/environments/environment_details/components/kubernetes/kubernetes_ta
])( ])(
'when activeTab is %s, it activates the right tab and emit the correct tab name when switching', 'when activeTab is %s, it activates the right tab and emit the correct tab name when switching',
async (activeTab, tabIndex, newTabIndex, newActiveTab) => { async (activeTab, tabIndex, newTabIndex, newActiveTab) => {
createWrapper(activeTab); createWrapper({ activeTab });
const tabsComponent = findTabs(); const tabsComponent = findTabs();
expect(tabsComponent.props('value')).toBe(tabIndex); expect(tabsComponent.props('value')).toBe(tabIndex);

View File

@@ -37,7 +37,7 @@ describe('MarkdownViewer', () => {
   describe('success', () => {
     beforeEach(() => {
       mock
-        .onPost(`${gon.relative_url_root}/testproject/preview_markdown`)
+        .onPost(`${gon.relative_url_root}/testproject/-/preview_markdown`)
         .replyOnce(HTTP_STATUS_OK, {
           body: '<b>testing</b> {{gl_md_img_1}}',
         });
@@ -61,7 +61,7 @@ describe('MarkdownViewer', () => {
       createComponent({ filePath: 'foo/test.md', commitSha: 'abcdef' });

       expect(axios.post).toHaveBeenCalledWith(
-        `${gon.relative_url_root}/testproject/preview_markdown`,
+        `${gon.relative_url_root}/testproject/-/preview_markdown`,
         { path: 'foo/test.md', text: '* Test', ref: 'abcdef' },
         expect.any(Object),
       );

View File

@@ -35,13 +35,13 @@ describe('markdownPreviewPath', () => {
   it('returns correct data sources', () => {
     expect(markdownPreviewPath({ fullPath: 'project/group', iid: '2' })).toBe(
-      '/foobar/project/group/preview_markdown?target_type=WorkItem&target_id=2',
+      '/foobar/project/group/-/preview_markdown?target_type=WorkItem&target_id=2',
     );
   });

   it('returns correct data sources with group context', () => {
     expect(markdownPreviewPath({ fullPath: 'group', iid: '2', isGroup: true })).toBe(
-      '/foobar/groups/group/preview_markdown?target_type=WorkItem&target_id=2',
+      '/foobar/groups/group/-/preview_markdown?target_type=WorkItem&target_id=2',
     );
   });
 });

View File

@@ -117,7 +117,7 @@ describe('WebIDE', () => {
     };

     const markdownPreview = 'test preview_markdown result';
-    mockServer.post('/:namespace/:project/preview_markdown', () => ({
+    mockServer.post('/:namespace/:project/-/preview_markdown', () => ({
       body: markdownPreview,
     }));

View File

@ -78,8 +78,8 @@ RSpec.describe Mutations::Commits::Create do
expect(subject[:errors]).to be_empty expect(subject[:errors]).to be_empty
expect_to_contain_deltas([ expect_to_contain_deltas([
a_hash_including(a_mode: '0', b_mode: '100644', new_file: true, new_path: file_path) a_hash_including(a_mode: '0', b_mode: '100644', new_file: true, new_path: file_path)
]) ])
end end
end end
@ -119,13 +119,13 @@ RSpec.describe Mutations::Commits::Create do
expect(subject[:errors]).to be_empty expect(subject[:errors]).to be_empty
expect_to_contain_deltas([ expect_to_contain_deltas([
a_hash_including(a_mode: '0', b_mode: '100644', new_path: 'foo/foobar'), a_hash_including(a_mode: '0', b_mode: '100644', new_path: 'foo/foobar'),
a_hash_including(deleted_file: true, new_path: 'README.md'), a_hash_including(deleted_file: true, new_path: 'README.md'),
a_hash_including(deleted_file: true, new_path: 'LICENSE'), a_hash_including(deleted_file: true, new_path: 'LICENSE'),
a_hash_including(new_file: true, new_path: 'LICENSE.md'), a_hash_including(new_file: true, new_path: 'LICENSE.md'),
a_hash_including(new_file: false, new_path: 'VERSION'), a_hash_including(new_file: false, new_path: 'VERSION'),
a_hash_including(a_mode: '100644', b_mode: '100755', new_path: 'CHANGELOG') a_hash_including(a_mode: '100644', b_mode: '100755', new_path: 'CHANGELOG')
]) ])
end end
end end
@ -168,8 +168,8 @@ RSpec.describe Mutations::Commits::Create do
expect(subject[:content]).to eq(actions.pluck(:content)) expect(subject[:content]).to eq(actions.pluck(:content))
expect_to_contain_deltas([ expect_to_contain_deltas([
a_hash_including(a_mode: '0', b_mode: '100644', new_file: true, new_path: 'ANOTHER_FILE.md') a_hash_including(a_mode: '0', b_mode: '100644', new_file: true, new_path: 'ANOTHER_FILE.md')
]) ])
end end
end end

View File

@ -20,24 +20,24 @@ RSpec.describe Resolvers::Environments::NestedEnvironmentsResolver, feature_cate
it 'finds the nested environments when status matches' do it 'finds the nested environments when status matches' do
expect(resolve_nested_environments(status: :created).to_a.pluck(:name, :size)) expect(resolve_nested_environments(status: :created).to_a.pluck(:name, :size))
.to match_array([ .to match_array([
['test', 1], ['test', 1],
['folder1', 2], ['folder1', 2],
['folder2', 1] ['folder2', 1]
]) ])
end end
it 'finds the nested environments when searching by name' do it 'finds the nested environments when searching by name' do
expect(resolve_nested_environments(search: 'folder2').to_a.pluck(:name, :size)) expect(resolve_nested_environments(search: 'folder2').to_a.pluck(:name, :size))
.to match_array([ .to match_array([
['folder2', 1] ['folder2', 1]
]) ])
end end
it 'finds the nested environments when name matches exactly' do it 'finds the nested environments when name matches exactly' do
expect(resolve_nested_environments(name: 'test').to_a.pluck(:name, :size)) expect(resolve_nested_environments(name: 'test').to_a.pluck(:name, :size))
.to match_array([ .to match_array([
['test', 1] ['test', 1]
]) ])
end end
end end

View File

@ -70,13 +70,13 @@ RSpec.describe Gitlab::Analytics::UsageTrends::WorkersArgumentBuilder do
it 'uses custom min/max for ids' do it 'uses custom min/max for ids' do
expect(subject).to eq([ expect(subject).to eq([
[ [
users_measurement_identifier, users_measurement_identifier,
min_id, min_id,
max_id, max_id,
recorded_at recorded_at
] ]
]) ])
end end
end end
end end

View File

@ -103,11 +103,11 @@ RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedOperationalVulnerabili
it 'drops Cluster Image Scanning and Custom Vulnerabilities without any Findings' do it 'drops Cluster Image Scanning and Custom Vulnerabilities without any Findings' do
expect(vulnerabilities.pluck(:id)).to match_array([ expect(vulnerabilities.pluck(:id)).to match_array([
vulnerability_with_finding.id, vulnerability_with_finding.id,
vulnerability_without_finding.id, vulnerability_without_finding.id,
cis_vulnerability_without_finding.id, cis_vulnerability_without_finding.id,
custom_vulnerability_without_finding.id custom_vulnerability_without_finding.id
]) ])
expect { background_migration.perform }.to change(vulnerabilities, :count).by(-2) expect { background_migration.perform }.to change(vulnerabilities, :count).by(-2)

View File

@ -23,7 +23,7 @@ RSpec.describe Gitlab::BacktraceCleaner do
expect(described_class.clean_backtrace(backtrace)) expect(described_class.clean_backtrace(backtrace))
.to eq([ .to eq([
"lib/gitlab/gitaly_client.rb:294:in `block (2 levels) in migrate'", "lib/gitlab/gitaly_client.rb:294:in `block (2 levels) in migrate'",
"lib/gitlab/gitaly_client.rb:331:in `allow_n_plus_1_calls'", "lib/gitlab/gitaly_client.rb:331:in `allow_n_plus_1_calls'",
"lib/gitlab/gitaly_client.rb:280:in `block in migrate'", "lib/gitlab/gitaly_client.rb:280:in `block in migrate'",
"lib/gitlab/gitaly_client.rb:278:in `migrate'", "lib/gitlab/gitaly_client.rb:278:in `migrate'",
@ -32,7 +32,7 @@ RSpec.describe Gitlab::BacktraceCleaner do
"app/models/repository.rb:1047:in `find_commit'", "app/models/repository.rb:1047:in `find_commit'",
"app/models/repository.rb:113:in `commit'", "app/models/repository.rb:113:in `commit'",
"ee/lib/gitlab/jira/middleware.rb:15:in `call'" "ee/lib/gitlab/jira/middleware.rb:15:in `call'"
]) ])
end end
end end
end end

View File

@ -69,9 +69,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::IdToken do
expect(id_token).not_to be_valid expect(id_token).not_to be_valid
expect(id_token.errors).to match_array([ expect(id_token.errors).to match_array([
'id token config missing required keys: aud', 'id token config missing required keys: aud',
'id token aud should be an array of strings or a string' 'id token aud should be an array of strings or a string'
]) ])
end end
end end
@ -84,8 +84,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::IdToken do
expect(id_token).not_to be_valid expect(id_token).not_to be_valid
expect(id_token.errors).to match_array([ expect(id_token.errors).to match_array([
'id token config contains unknown keys: unknown' 'id token config contains unknown keys: unknown'
]) ])
end end
end end
end end

View File

@ -6,12 +6,12 @@ RSpec.describe Gitlab::Ci::SecureFiles::X509Name do
describe '.parse' do describe '.parse' do
it 'parses an X509Name object into a hash format' do it 'parses an X509Name object into a hash format' do
sample = OpenSSL::X509::Name.new([ sample = OpenSSL::X509::Name.new([
['C', 'Test Country'], ['C', 'Test Country'],
['O', 'Test Org Name'], ['O', 'Test Org Name'],
['OU', 'Test Org Unit'], ['OU', 'Test Org Unit'],
['CN', 'Test Common Name'], ['CN', 'Test Common Name'],
['UID', 'Test UID'] ['UID', 'Test UID']
]) ])
parsed_sample = described_class.parse(sample) parsed_sample = described_class.parse(sample)

View File

@ -9,10 +9,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
let(:single_sticking_object) { Set.new([[ActiveRecord::Base.sticking, :user, 99]]) } let(:single_sticking_object) { Set.new([[ActiveRecord::Base.sticking, :user, 99]]) }
let(:multiple_sticking_objects) do let(:multiple_sticking_objects) do
Set.new([ Set.new([
[ActiveRecord::Base.sticking, :user, 42], [ActiveRecord::Base.sticking, :user, 42],
[ActiveRecord::Base.sticking, :runner, '123456789'], [ActiveRecord::Base.sticking, :runner, '123456789'],
[ActiveRecord::Base.sticking, :runner, '1234'] [ActiveRecord::Base.sticking, :runner, '1234']
]) ])
end end
after do after do

View File

@ -26,10 +26,10 @@ RSpec.describe ::Gitlab::Database::LoadBalancing::ServiceDiscovery::Sampler do
it 'samples random ports across all hosts' do it 'samples random ports across all hosts' do
expect(sampler.sample(addresses)).to eq([ expect(sampler.sample(addresses)).to eq([
address_class.new("127.0.0.1", 6432), address_class.new("127.0.0.1", 6432),
address_class.new("127.0.0.2", 6435), address_class.new("127.0.0.2", 6435),
address_class.new("127.0.0.1", 6435) address_class.new("127.0.0.1", 6435)
]) ])
end end
it 'returns the same answer for the same input when called multiple times' do it 'returns the same answer for the same input when called multiple times' do

View File

@@ -83,7 +83,11 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
       'zoekt_repositories.project_identifier',
       'zoekt_tasks.project_identifier',
       'project_audit_events.project_id',
-      'group_audit_events.group_id'
+      'group_audit_events.group_id',
+      # aggregated table, a worker ensures eventual consistency
+      'analytics_cycle_analytics_issue_stage_events.group_id',
+      # aggregated table, a worker ensures eventual consistency
+      'analytics_cycle_analytics_merge_request_stage_events.group_id'
     ]
   end

View File

@ -23,9 +23,9 @@ RSpec.describe Gitlab::Diff::InlineDiff do
it 'finds all inline diffs', :aggregate_failures do it 'finds all inline diffs', :aggregate_failures do
expect(subject[0]).to eq([Gitlab::MarkerRange.new(3, 6, mode: :deletion)]) expect(subject[0]).to eq([Gitlab::MarkerRange.new(3, 6, mode: :deletion)])
expect(subject[1]).to eq([ expect(subject[1]).to eq([
Gitlab::MarkerRange.new(3, 3, mode: :addition), Gitlab::MarkerRange.new(3, 3, mode: :addition),
Gitlab::MarkerRange.new(17, 22, mode: :addition) Gitlab::MarkerRange.new(17, 22, mode: :addition)
]) ])
end end
end end
end end

View File

@ -408,17 +408,17 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
context 'when directions are project.id DESC, issues.id ASC' do context 'when directions are project.id DESC, issues.id ASC' do
let(:order) do let(:order) do
Gitlab::Pagination::Keyset::Order.build([ Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'projects_id', attribute_name: 'projects_id',
order_expression: Issue.arel_table[:projects_id].asc, order_expression: Issue.arel_table[:projects_id].asc,
sql_type: 'integer', sql_type: 'integer',
nullable: :not_nullable nullable: :not_nullable
), ),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: :id, attribute_name: :id,
order_expression: Issue.arel_table[:id].asc order_expression: Issue.arel_table[:id].asc
) )
]) ])
end end
let(:expected_order) { issues.sort_by { |issue| [issue.project_id, issue.id] } } let(:expected_order) { issues.sort_by { |issue| [issue.project_id, issue.id] } }
@ -439,17 +439,17 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
context 'when directions are projects.id DESC, issues.id ASC' do context 'when directions are projects.id DESC, issues.id ASC' do
let(:order) do let(:order) do
Gitlab::Pagination::Keyset::Order.build([ Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'projects_id', attribute_name: 'projects_id',
order_expression: Issue.arel_table[:projects_id].desc, order_expression: Issue.arel_table[:projects_id].desc,
sql_type: 'integer', sql_type: 'integer',
nullable: :not_nullable nullable: :not_nullable
), ),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: :id, attribute_name: :id,
order_expression: Issue.arel_table[:id].asc order_expression: Issue.arel_table[:id].asc
) )
]) ])
end end
let(:expected_order) { issues.sort_by { |issue| [issue.project_id * -1, issue.id] } } let(:expected_order) { issues.sort_by { |issue| [issue.project_id * -1, issue.id] } }
@ -470,12 +470,12 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
context 'when directions are projects.name ASC, projects.id ASC, issues.id ASC' do context 'when directions are projects.name ASC, projects.id ASC, issues.id ASC' do
let(:order) do let(:order) do
Gitlab::Pagination::Keyset::Order.build([ Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'projects_name', attribute_name: 'projects_name',
order_expression: Issue.arel_table[:projects_name].asc, order_expression: Issue.arel_table[:projects_name].asc,
sql_type: 'character varying', sql_type: 'character varying',
nullable: :not_nullable nullable: :not_nullable
), ),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'projects_id', attribute_name: 'projects_id',
order_expression: Issue.arel_table[:projects_id].asc, order_expression: Issue.arel_table[:projects_id].asc,
@ -486,7 +486,7 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
attribute_name: :id, attribute_name: :id,
order_expression: Issue.arel_table[:id].asc order_expression: Issue.arel_table[:id].asc
) )
]) ])
end end
let(:expected_order) { issues.sort_by { |issue| [issue.project.name, issue.project.id, issue.id] } } let(:expected_order) { issues.sort_by { |issue| [issue.project.name, issue.project.id, issue.id] } }
@ -506,17 +506,17 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
let(:order) do let(:order) do
Gitlab::Pagination::Keyset::Order.build([ Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'projects_name', attribute_name: 'projects_name',
order_expression: Issue.arel_table[:projects_name].asc, order_expression: Issue.arel_table[:projects_name].asc,
sql_type: 'character varying', sql_type: 'character varying',
nullable: :nulls_last nullable: :nulls_last
), ),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: :id, attribute_name: :id,
order_expression: Issue.arel_table[:id].asc order_expression: Issue.arel_table[:id].asc
) )
]) ])
end end
let(:expected_order) { issues.sort_by { |issue| [issue.id] } } let(:expected_order) { issues.sort_by { |issue| [issue.id] } }

View File

@ -523,101 +523,101 @@ RSpec.describe Gitlab::UsageData::Topology do
receive(:query) receive(:query)
.with(/gitlab_usage_ping:ops:rate/) .with(/gitlab_usage_ping:ops:rate/)
.and_return(result || [ .and_return(result || [
{ {
'metric' => { 'component' => 'http_requests', 'service' => 'workhorse' },
'value' => [1000, '0.01']
}
])
end

def receive_query_apdex_ratio_query(result: nil)
receive(:query)
.with(/gitlab_usage_ping:sql_duration_apdex:ratio_rate5m/)
.and_return(result || [
{
'metric' => {},
'value' => [1000, '0.996']
}
])
end

def receive_node_memory_query(result: nil)
receive(:query)
.with(/node_memory_total_bytes/, an_instance_of(Hash))
.and_return(result || [
{
'metric' => { 'instance' => 'instance1:8080' },
'value' => [1000, '512']
},
{
'metric' => { 'instance' => 'instance2:8090' },
'value' => [1000, '1024']
}
])
end

def receive_node_memory_utilization_query(result: nil)
receive(:query)
.with(/node_memory_utilization/, an_instance_of(Hash))
.and_return(result || [
{
'metric' => { 'instance' => 'instance1:8080' },
'value' => [1000, '0.45']
},
{
'metric' => { 'instance' => 'instance2:8090' },
'value' => [1000, '0.25']
}
])
end

def receive_node_cpu_count_query(result: nil)
receive(:query)
.with(/node_cpus/, an_instance_of(Hash))
.and_return(result || [
{
'metric' => { 'instance' => 'instance2:8090' },
'value' => [1000, '16']
},
{
'metric' => { 'instance' => 'instance1:8080' },
'value' => [1000, '8']
}
])
end

def receive_node_cpu_utilization_query(result: nil)
receive(:query)
.with(/node_cpu_utilization/, an_instance_of(Hash))
.and_return(result || [
{
'metric' => { 'instance' => 'instance2:8090' },
'value' => [1000, '0.2']
},
{
'metric' => { 'instance' => 'instance1:8080' },
'value' => [1000, '0.1']
}
])
end

def receive_node_uname_info_query(result: nil)
receive(:query)
.with('node_uname_info')
.and_return(result || [
{
"metric" => {
"__name__" => "node_uname_info",
"domainname" => "(none)",
"instance" => "instance1:9100",
"job" => "node_exporter",
"machine" => "x86_64",
"nodename" => "instance1",
"release" => "4.19.76-linuxkit",
"sysname" => "Linux"
},
"value" => [1592463033.359, "1"]
},
{
"metric" => {
"__name__" => "node_uname_info",
@@ -631,17 +631,17 @@ RSpec.describe Gitlab::UsageData::Topology do
},
"value" => [1592463033.359, "1"]
}
])
end

def receive_node_service_memory_rss_query(result: nil)
receive(:query)
.with(/process_resident_memory_bytes/, an_instance_of(Hash))
.and_return(result || [
{
'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
'value' => [1000, '300']
},
{
'metric' => { 'instance' => 'instance1:8090', 'job' => 'gitlab-sidekiq' },
'value' => [1000, '303']
@@ -655,44 +655,44 @@ RSpec.describe Gitlab::UsageData::Topology do
'metric' => { 'instance' => 'instance2:9121', 'job' => 'redis' },
'value' => [1000, '402']
}
])
end

def receive_node_service_memory_uss_query(result: nil)
receive(:query)
.with(/process_unique_memory_bytes/, an_instance_of(Hash))
.and_return(result || [
{
'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
'value' => [1000, '301']
}
])
end

def receive_node_service_memory_pss_query(result: nil)
receive(:query)
.with(/process_proportional_memory_bytes/, an_instance_of(Hash))
.and_return(result || [
{
'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
'value' => [1000, '302']
},
{
'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq' },
'value' => [1000, '401']
}
])
end

def receive_node_service_process_count_query(result: nil)
receive(:query)
.with(/service_process:count/, an_instance_of(Hash))
.and_return(result || [
# instance 1
{
'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
'value' => [1000, '10']
},
{
'metric' => { 'instance' => 'instance1:8090', 'job' => 'gitlab-sidekiq' },
'value' => [1000, '5']
@@ -710,23 +710,23 @@ RSpec.describe Gitlab::UsageData::Topology do
'metric' => { 'instance' => 'instance2:8080', 'job' => 'registry' },
'value' => [1000, '1']
}
])
end

def receive_node_service_app_server_workers_query(result: nil)
receive(:query)
.with(/app_server_workers/, an_instance_of(Hash))
.and_return(result || [
# instance 1
{
'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails', 'server' => 'puma' },
'value' => [1000, '2']
},
# instance 2
{
'metric' => { 'instance' => 'instance2:8080', 'job' => 'gitlab-rails', 'server' => 'puma' },
'value' => [1000, '1']
}
])
end
end

View File

@@ -42,10 +42,10 @@ RSpec.describe ProjectGroupLink, feature_category: :groups_and_projects do
it 'returns all records which are greater than Guests access' do
expect(described_class.non_guests).to match_array([
project_group_link_reporter,
project_group_link_developer,
project_group_link_maintainer
])
end
end
end

View File

@@ -22,7 +22,7 @@ RSpec.describe UserPreference, feature_category: :user_profile do
using RSpec::Parameterized::TableSyntax
where(color: [
'#000000',
'#123456',
'#abcdef',
'#AbCdEf',
@@ -35,7 +35,7 @@ RSpec.describe UserPreference, feature_category: :user_profile do
'#fff',
'#fFf',
''
])
with_them do
it { is_expected.to allow_value(color).for(:diffs_deletion_color) }
@@ -43,14 +43,14 @@ RSpec.describe UserPreference, feature_category: :user_profile do
end
where(color: [
'#1',
'#12',
'#1234',
'#12345',
'#1234567',
'123456',
'#12345x'
])
with_them do
it { is_expected.not_to allow_value(color).for(:diffs_deletion_color) }

View File

@@ -28,7 +28,7 @@ RSpec.describe API::GroupImport, feature_category: :importers do
describe 'POST /groups/import' do
let(:file_upload) { fixture_file_upload(file) }
-let(:params) do
+let(:base_params) do
{
path: 'test-import-group',
name: 'test-import-group',
@@ -36,6 +36,8 @@ RSpec.describe API::GroupImport, feature_category: :importers do
}
end
+let(:params) { base_params }
subject { upload_archive(file_upload, workhorse_headers, params) }
shared_examples 'when all params are correct' do
@@ -256,7 +258,22 @@ RSpec.describe API::GroupImport, feature_category: :importers do
expect(Group.last.organization_id).to eq(current_organization.id)
end
-include_examples 'when all params are correct'
+context 'when importing to a parent group' do
+let_it_be(:group) { create(:group, organization: current_organization) }
+before do
+group.add_owner(user)
+end
+it 'creates new group and accepts request' do
+params[:parent_id] = group.id
+subject
+expect(response).to have_gitlab_http_status(:accepted)
+expect(group.children.count).to eq(1)
+end
+end
end
context 'and current organization is not defined' do
@@ -270,6 +287,25 @@ RSpec.describe API::GroupImport, feature_category: :importers do
end
end
+context 'when organization_id param is different than parent group organization' do
+let_it_be(:current_organization) { create(:organization, users: [user]) }
+let(:params) { base_params.merge(organization_id: current_organization.id) }
+before do
+group.add_owner(user)
+end
+it 'rejects the request' do
+params[:parent_id] = group.id
+subject
+error_message = "You can't create a group in a different organization than the parent group."
+expect(response).to have_gitlab_http_status(:bad_request)
+expect(json_response["message"]).to include(error_message)
+end
+end
def upload_archive(file, headers = {}, params = {})
workhorse_finalize(
api('/groups/import', user),

View File

@@ -77,11 +77,11 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
describe '#referenced_merge_requests' do
it 'returns the referenced merge requests' do
expect(service.referenced_merge_requests(issue)).to match_array([
closing_mr,
closing_mr_other_project,
referencing_mr,
referencing_mr_other_project
])
end
it 'excludes cross project references if the user cannot read cross project' do

View File

@@ -205,25 +205,25 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
it 'deduplicates (except cwe and wasc) and sorts the vulnerabilities by severity (desc) then by compare key' do
expect(merged_report.findings).to(
eq([
finding_cwe_2,
finding_wasc_2,
finding_cwe_1,
finding_id_2_loc_2,
finding_id_2_loc_1,
finding_wasc_1,
finding_id_1
])
)
end
it 'deduplicates scanned resources' do
expect(merged_report.scanned_resources).to(
eq([
scanned_resource,
scanned_resource_1,
scanned_resource_2,
scanned_resource_3
])
)
end

View File

@@ -550,7 +550,6 @@
- './ee/spec/graphql/ee/types/group_type_spec.rb'
- './ee/spec/graphql/ee/types/issuable_type_spec.rb'
- './ee/spec/graphql/ee/types/issue_sort_enum_spec.rb'
-- './ee/spec/graphql/ee/types/merge_request_type_spec.rb'
- './ee/spec/graphql/ee/types/milestone_type_spec.rb'
- './ee/spec/graphql/ee/types/mutation_type_spec.rb'
- './ee/spec/graphql/ee/types/namespace_type_spec.rb'

View File

@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::DailyBuildGroupReportResultsWorker, feature_category: :code_testing do
describe '#perform' do
-let!(:pipeline) { create(:ci_pipeline) }
+let_it_be(:pipeline) { create(:ci_pipeline) }
subject { described_class.new.perform(pipeline_id) }
@@ -20,7 +20,7 @@ RSpec.describe Ci::DailyBuildGroupReportResultsWorker, feature_category: :code_t
end
context 'when pipeline is not found' do
-let(:pipeline_id) { 123 }
+let(:pipeline_id) { non_existing_record_id }
it 'does not execute service' do
expect_any_instance_of(Ci::DailyBuildGroupReportResultService)

View File

@@ -6,7 +6,7 @@ toolchain go1.21.9
require (
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.3.2
-github.com/BurntSushi/toml v1.3.2
+github.com/BurntSushi/toml v1.4.0
github.com/alecthomas/chroma/v2 v2.14.0
github.com/aws/aws-sdk-go v1.51.14
github.com/disintegration/imaging v1.6.2

View File

@@ -77,8 +77,8 @@ github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcP
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
+github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
-github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/DataDog/datadog-go v4.4.0+incompatible h1:R7WqXWP4fIOAqWJtUKmSfuc7eDsBT58k9AY5WSHVosk=
github.com/DataDog/datadog-go v4.4.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=

View File

@@ -75,7 +75,7 @@ func NewParser(ctx context.Context, r io.Reader) (io.ReadCloser, error) {
pr: pr,
}
-go parser.transform(pw)
+go func() { _ = parser.transform(pw) }()
return parser, nil
}
@@ -86,25 +86,24 @@ func (p *Parser) Read(b []byte) (int, error) {
}
// Close closes the parser and its associated resources
func (p *Parser) Close() error {
-_ = p.pr.Close()
-return p.Docs.Close()
+return errors.Join(p.pr.Close(), p.Docs.Close())
}
-func (p *Parser) transform(pw *io.PipeWriter) {
+func (p *Parser) transform(pw *io.PipeWriter) error {
zw := zip.NewWriter(pw)
if err := p.Docs.SerializeEntries(zw); err != nil {
_ = zw.Close() // Free underlying resources only
pw.CloseWithError(fmt.Errorf("lsif parser: Docs.SerializeEntries: %v", err))
-return
+return err
}
if err := zw.Close(); err != nil {
pw.CloseWithError(fmt.Errorf("lsif parser: ZipWriter.Close: %v", err))
-return
+return err
}
-_ = pw.Close()
+return pw.Close()
}