Add latest changes from gitlab-org/gitlab@master

parent dc66c37ac3
commit 54b2cc7dfc
@@ -990,6 +990,9 @@ lib/gitlab/checks/**
/app/policies/
/ee/app/policies/
/ee/app/models/members/member_role.rb
/ee/app/models/auth/member_role_ability_loader.rb
/ee/app/models/preloaders/user_member_roles_in_groups_preloader.rb
/ee/app/models/preloaders/user_member_roles_in_projects_preloader.rb
/ee/app/services/member_roles/
/ee/app/graphql/types/member_roles/
/ee/app/graphql/mutations/member_roles/

@@ -1165,7 +1168,6 @@ lib/gitlab/checks/**
/ee/app/helpers/ee/access_tokens_helper.rb
/ee/app/helpers/ee/auth_helper.rb
/ee/app/helpers/ee/personal_access_tokens_helper.rb
/ee/app/models/auth/
/ee/app/models/concerns/identity_verifiable.rb
/ee/app/models/concerns/password_complexity.rb
/ee/app/models/ee/personal_access_token.rb
@@ -111,7 +111,7 @@
  if: '$CI_MERGE_REQUEST_LABELS =~ /group::global search/'

.if-merge-request-labels-pipeline-expedite: &if-merge-request-labels-pipeline-expedite
  if: '($CI_MERGE_REQUEST_LABELS =~ /master:(foss-)?broken/ || $CI_MERGE_REQUEST_LABELS =~ /quarantine/) && $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'
  if: '$CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'

.if-merge-request-labels-frontend-and-feature-flag: &if-merge-request-labels-frontend-and-feature-flag
  if: '$CI_MERGE_REQUEST_LABELS =~ /frontend/ && $CI_MERGE_REQUEST_LABELS =~ /feature flag/'

@@ -664,6 +664,7 @@
    - "ee/config/custom_abilities/*.yml"
    - "doc/user/custom_roles/abilities.md"
    - "tooling/custom_roles/docs/templates/custom_abilities.md.erb"
    - "ee/{lib/,spec/}tasks/gitlab/custom_roles/*"

##################
# Conditions set #
@@ -330,6 +330,13 @@ Rails/InverseOf:
    - app/models/**/*.rb
    - ee/app/models/**/*.rb

Rails/MigrationTimestamp:
  Enabled: true
  Include:
    - 'db/migrate/*.rb'
    - 'db/post_migrate/*.rb'
    - 'ee/db/geo/migrate/*.rb'

# This is currently exiting with a rubocop exception error and should be
# resolved hopefully in a future update
# An error occurred while Rails/UniqueValidationWithoutIndex cop was inspecting

@@ -0,0 +1,3 @@
---
Rails/MigrationTimestamp:
  Details: grace period
@ -1,19 +1,17 @@
|
|||
<script>
|
||||
import { GlLoadingIcon, GlEmptyState, GlSprintf, GlLink, GlAlert } from '@gitlab/ui';
|
||||
import { GlEmptyState, GlSprintf, GlLink, GlAlert } from '@gitlab/ui';
|
||||
import CLUSTER_EMPTY_SVG from '@gitlab/svgs/dist/illustrations/empty-state/empty-state-clusters.svg?url';
|
||||
import { s__ } from '~/locale';
|
||||
import { helpPagePath } from '~/helpers/help_page_helper';
|
||||
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
||||
import { createK8sAccessConfiguration } from '~/environments/helpers/k8s_integration_helper';
|
||||
import { CLUSTER_HEALTH_SUCCESS, CLUSTER_HEALTH_ERROR } from '~/environments/constants';
|
||||
import environmentClusterAgentQuery from '~/environments/graphql/queries/environment_cluster_agent.query.graphql';
|
||||
import KubernetesStatusBar from './kubernetes_status_bar.vue';
|
||||
import KubernetesAgentInfo from './kubernetes_agent_info.vue';
|
||||
import KubernetesTabs from './kubernetes_tabs.vue';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlLoadingIcon,
|
||||
GlEmptyState,
|
||||
KubernetesStatusBar,
|
||||
KubernetesAgentInfo,
|
||||
|
|
@ -24,29 +22,27 @@ export default {
|
|||
},
|
||||
inject: ['kasTunnelUrl'],
|
||||
props: {
|
||||
projectFullPath: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
environmentName: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
apollo: {
|
||||
environment: {
|
||||
query: environmentClusterAgentQuery,
|
||||
variables() {
|
||||
return {
|
||||
projectFullPath: this.projectFullPath,
|
||||
environmentName: this.environmentName,
|
||||
};
|
||||
},
|
||||
update(data) {
|
||||
return data?.project?.environment;
|
||||
},
|
||||
clusterAgent: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
kubernetesNamespace: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
fluxResourcePath: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
},
|
||||
|
||||
data() {
|
||||
return {
|
||||
error: null,
|
||||
|
|
@ -55,18 +51,6 @@ export default {
|
|||
};
|
||||
},
|
||||
computed: {
|
||||
isLoading() {
|
||||
return this.$apollo.queries.environment.loading;
|
||||
},
|
||||
clusterAgent() {
|
||||
return this.environment?.clusterAgent;
|
||||
},
|
||||
kubernetesNamespace() {
|
||||
return this.environment?.kubernetesNamespace || '';
|
||||
},
|
||||
fluxResourcePath() {
|
||||
return this.environment?.fluxResourcePath || '';
|
||||
},
|
||||
gitlabAgentId() {
|
||||
return getIdFromGraphQLId(this.clusterAgent.id).toString();
|
||||
},
|
||||
|
|
@ -110,8 +94,7 @@ export default {
|
|||
};
|
||||
</script>
|
||||
<template>
|
||||
<gl-loading-icon v-if="isLoading" />
|
||||
<div v-else-if="clusterAgent" class="gl-p-5 gl-bg-gray-10 gl-mt-n3">
|
||||
<div v-if="clusterAgent" class="gl-p-5 gl-bg-gray-10 gl-mt-n3">
|
||||
<div
|
||||
class="gl-display-flex gl-flex-wrap gl-justify-content-space-between gl-align-items-center"
|
||||
>
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
<!-- eslint-disable vue/multi-word-component-names -->
|
||||
<script>
|
||||
import { GlTabs, GlTab } from '@gitlab/ui';
|
||||
import { GlLoadingIcon, GlTabs, GlTab } from '@gitlab/ui';
|
||||
import { s__ } from '~/locale';
|
||||
import { getParameterValues, setUrlParams, updateHistory } from '~/lib/utils/url_utility';
|
||||
import environmentClusterAgentQuery from '~/environments/graphql/queries/environment_cluster_agent.query.graphql';
|
||||
import DeploymentHistory from './components/deployment_history.vue';
|
||||
import KubernetesOverview from './components/kubernetes/kubernetes_overview.vue';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlLoadingIcon,
|
||||
GlTabs,
|
||||
GlTab,
|
||||
DeploymentHistory,
|
||||
|
|
@ -32,11 +35,39 @@ export default {
|
|||
default: null,
|
||||
},
|
||||
},
|
||||
apollo: {
|
||||
environment: {
|
||||
query: environmentClusterAgentQuery,
|
||||
variables() {
|
||||
return {
|
||||
projectFullPath: this.projectFullPath,
|
||||
environmentName: this.environmentName,
|
||||
};
|
||||
},
|
||||
update(data) {
|
||||
return data?.project?.environment;
|
||||
},
|
||||
result() {
|
||||
this.updateCurrentTab();
|
||||
},
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
currentTabIndex: 0,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
isLoading() {
|
||||
return this.$apollo.queries.environment.loading;
|
||||
},
|
||||
kubernetesNamespace() {
|
||||
return this.environment?.kubernetesNamespace || '';
|
||||
},
|
||||
fluxResourcePath() {
|
||||
return this.environment?.fluxResourcePath || '';
|
||||
},
|
||||
},
|
||||
i18n: {
|
||||
deploymentHistory: s__('Environments|Deployment history'),
|
||||
kubernetesOverview: s__('Environments|Kubernetes overview'),
|
||||
|
|
@ -49,19 +80,36 @@ export default {
|
|||
linkClass(index) {
|
||||
return index === this.currentTabIndex ? 'gl-inset-border-b-2-theme-accent' : '';
|
||||
},
|
||||
updateCurrentTab() {
|
||||
const hasKubernetesIntegration = this.environment?.clusterAgent;
|
||||
const selectedTabFromUrl = getParameterValues('tab');
|
||||
|
||||
// Note: We want to open the deployments history tab when
|
||||
// the Kubernetes integration is not set for the environment and
|
||||
// neither tab is preselected via URL param.
|
||||
if (!hasKubernetesIntegration && !selectedTabFromUrl.length) {
|
||||
updateHistory({
|
||||
url: setUrlParams({ tab: this.$options.params.deployments }),
|
||||
replace: true,
|
||||
});
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
<gl-tabs v-model="currentTabIndex" sync-active-tab-with-query-params>
|
||||
<gl-loading-icon v-if="isLoading" />
|
||||
<gl-tabs v-else v-model="currentTabIndex" sync-active-tab-with-query-params>
|
||||
<gl-tab
|
||||
:title="$options.i18n.kubernetesOverview"
|
||||
:query-param-value="$options.params.kubernetes"
|
||||
:title-link-class="linkClass(0)"
|
||||
>
|
||||
<kubernetes-overview
|
||||
:project-full-path="projectFullPath"
|
||||
:environment-name="environmentName"
|
||||
:cluster-agent="environment.clusterAgent"
|
||||
:kubernetes-namespace="kubernetesNamespace"
|
||||
:flux-resource-path="fluxResourcePath"
|
||||
/>
|
||||
</gl-tab>
|
||||
|
||||
|
|
|
|||
|
|
@@ -28,6 +28,11 @@ export default {
      required: false,
      default: null,
    },
    labelDescription: {
      type: String,
      required: false,
      default: null,
    },
    name: {
      type: String,
      required: true,

@@ -141,6 +146,7 @@ export default {
<template>
  <gl-form-group
    :label="humanizedTitle"
    :label-description="labelDescription"
    :label-for="fieldId"
    :invalid-feedback="__('This field is required.')"
    :state="valid"
@@ -218,21 +218,21 @@ export const scopedSearchOptions = (state, getters) => {
};

export const scopedSearchGroup = (state, getters) => {
  let name = sprintf(COMMAND_PALETTE_SEARCH_SCOPE_HEADER, { searchTerm: state.search });
  let name = sprintf(COMMAND_PALETTE_SEARCH_SCOPE_HEADER, { searchTerm: state.search }, false);
  const items = getters.scopedSearchOptions?.length > 0 ? getters.scopedSearchOptions : [];

  switch (state.commandChar) {
    case COMMAND_PALETTE_PAGES_CHAR:
      name = sprintf(COMMAND_PALETTE_PAGES_SCOPE_HEADER, { searchTerm: state.search });
      name = sprintf(COMMAND_PALETTE_PAGES_SCOPE_HEADER, { searchTerm: state.search }, false);
      break;
    case COMMAND_PALETTE_USERS_CHAR:
      name = sprintf(COMMAND_PALETTE_USERS_SCOPE_HEADER, { searchTerm: state.search });
      name = sprintf(COMMAND_PALETTE_USERS_SCOPE_HEADER, { searchTerm: state.search }, false);
      break;
    case COMMAND_PALETTE_PROJECTS_CHAR:
      name = sprintf(COMMAND_PALETTE_PROJECTS_SCOPE_HEADER, { searchTerm: state.search });
      name = sprintf(COMMAND_PALETTE_PROJECTS_SCOPE_HEADER, { searchTerm: state.search }, false);
      break;
    case COMMAND_PALETTE_FILES_CHAR:
      name = sprintf(COMMAND_PALETTE_FILES_SCOPE_HEADER, { searchTerm: state.search });
      name = sprintf(COMMAND_PALETTE_FILES_SCOPE_HEADER, { searchTerm: state.search }, false);
      break;
    default:
      break;
@@ -241,12 +241,59 @@ module DiffHelper
    {}
  end

  def conflicts_with_types
    return unless merge_request.cannot_be_merged? && merge_request.source_branch_exists? && merge_request.target_branch_exists?

    cached_conflicts_with_types(enabled: Feature.enabled?(:cached_conflicts_with_types, merge_request.project)) do
      conflicts_service = MergeRequests::Conflicts::ListService.new(merge_request, allow_tree_conflicts: true) # rubocop:disable CodeReuse/ServiceClass

      {}.tap do |h|
        conflicts_service.conflicts.files.each do |file|
          h[file.path] = {
            conflict_type: file.conflict_type,
            conflict_type_when_renamed: file.conflict_type(when_renamed: true)
          }
        end
      end
    end
  rescue Gitlab::Git::Conflict::Resolver::ConflictSideMissing
    # This exception is raised when changes on a fork aren't present on the canonical repo yet.
    # We can't list conflicts until the canonical repo gets the references from the fork,
    # which happens asynchronously when the MR is updated.
    #
    # Return an empty hash to indicate that there are no conflicts.
    {}
  end

  def params_with_whitespace
    hide_whitespace? ? safe_params.except(:w) : safe_params.merge(w: 1)
  end

  private

  def cached_conflicts_with_types(enabled: false)
    return yield unless enabled

    cache_key = "merge_request_#{merge_request.id}_conflicts_with_types"
    cache = Rails.cache.read(cache_key)
    source_branch_sha = merge_request.source_branch_sha
    target_branch_sha = merge_request.target_branch_sha

    if cache.blank? || cache[:source_sha] != source_branch_sha || cache[:target_sha] != target_branch_sha
      conflicts_files = yield

      cache = {
        source_sha: source_branch_sha,
        target_sha: target_branch_sha,
        conflicts: conflicts_files
      }

      Rails.cache.write(cache_key, cache)
    end

    cache[:conflicts]
  end

  def diff_btn(title, name, selected)
    params_copy = safe_params.dup
    params_copy[:view] = name
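Because the cached entry stores the source and target SHAs alongside the conflict data, a push to either branch changes the comparison and forces a recompute; no explicit expiry is needed. A minimal, hypothetical sketch of that behaviour in plain Ruby, with an in-memory hash standing in for `Rails.cache`:

```ruby
# Hypothetical stand-in for Rails.cache, to show the SHA-keyed invalidation.
CACHE = {}

def conflicts_for(mr_id, source_sha, target_sha)
  key = "merge_request_#{mr_id}_conflicts_with_types"
  entry = CACHE[key]

  # Recompute when nothing is cached or either branch head has moved.
  if entry.nil? || entry[:source_sha] != source_sha || entry[:target_sha] != target_sha
    entry = { source_sha: source_sha, target_sha: target_sha, conflicts: yield }
    CACHE[key] = entry
  end

  entry[:conflicts]
end

conflicts_for(1, 'abc123', 'def456') { puts 'computing'; { 'a.rb' => :both_modified } } # computes
conflicts_for(1, 'abc123', 'def456') { puts 'computing'; {} }                           # cache hit, block not run
conflicts_for(1, 'new456', 'def456') { puts 'computing'; {} }                           # source moved, recomputes
```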
@@ -6,20 +6,16 @@ module Ci
    include Ci::HasVariable
    include Ci::RawVariable

    ROUTING_FEATURE_FLAG = :ci_partitioning_use_ci_pipeline_variables_routing_table

    belongs_to :pipeline,
      ->(pipeline_variable) { in_partition(pipeline_variable) },
      partition_foreign_key: :partition_id,
      inverse_of: :variables

    self.primary_key = :id
    self.table_name = :p_ci_pipeline_variables
    self.sequence_name = :ci_pipeline_variables_id_seq

    partitionable scope: :pipeline, through: {
      table: :p_ci_pipeline_variables,
      flag: ROUTING_FEATURE_FLAG
    }
    partitionable scope: :pipeline, partitioned: true

    alias_attribute :secret_value, :value
@@ -7,6 +7,7 @@ module Integrations
    ATTRIBUTES = %i[
      section type placeholder choices value checkbox_label
      title help if description
      label_description
      non_empty_password_help
      non_empty_password_title
    ].concat(BOOLEAN_ATTRIBUTES).freeze
@@ -83,7 +83,8 @@ module Users
      code_suggestions_ga_owner_alert: 81, # EE-only
      product_analytics_dashboard_feedback: 82, # EE-only
      joining_a_project_alert: 83, # EE-only
      transition_to_jihu_callout: 84
      transition_to_jihu_callout: 84,
      summarize_code_changes: 85 # EE-only
    }

    validates :feature_name,
@@ -6,19 +6,14 @@ module DiffFileConflictType

  included do
    expose :conflict_type do |diff_file, options|
      conflict_file = conflict_file(options, diff_file)
      next unless options[:conflicts]

      next unless conflict_file
      diff_file_conflict_type = options[:conflicts][diff_file.new_path]

      conflict_file.conflict_type(diff_file)
    end
  end
      next unless diff_file_conflict_type.present?
      next diff_file_conflict_type[:conflict_type] unless diff_file.renamed_file?

  private

  def conflict_file(options, diff_file)
    strong_memoize(:conflict_file) do
      options[:conflicts] && options[:conflicts][diff_file.new_path]
      diff_file_conflict_type[:conflict_type_when_renamed]
    end
  end
end
@@ -74,7 +74,7 @@ class DiffsEntity < Grape::Entity
        options.merge(
          submodule_links: submodule_links,
          code_navigation_path: code_navigation_path(diffs),
          conflicts: conflicts(allow_tree_conflicts: true)
          conflicts: conflicts_with_types
        )
      )
    end
@@ -6,7 +6,7 @@ class DiffsMetadataEntity < DiffsEntity
      DiffFileMetadataEntity.represent(
        diffs.raw_diff_files(sorted: true),
        options.merge(
          conflicts: conflicts(allow_tree_conflicts: true)
          conflicts: conflicts_with_types
        )
      )
    end
@@ -5,7 +5,7 @@ module Integrations
    include RequestAwareEntity
    include Gitlab::Utils::StrongMemoize

    expose :section, :name, :placeholder, :required, :choices, :checkbox_label
    expose :section, :name, :placeholder, :required, :choices, :checkbox_label, :label_description

    expose :title do |field|
      non_empty_password?(field) ? field[:non_empty_password_title] : field[:title]
@@ -17,7 +17,7 @@ class PaginatedDiffEntity < Grape::Entity
      options.merge(
        submodule_links: submodule_links,
        code_navigation_path: code_navigation_path(diffs),
        conflicts: conflicts(allow_tree_conflicts: true)
        conflicts: conflicts_with_types
      )
    )
  end
@@ -20,14 +20,18 @@ module ExclusiveLeaseGuard
  def try_obtain_lease
    lease = exclusive_lease.try_obtain

    Gitlab::Instrumentation::ExclusiveLock.increment_requested_count

    unless lease
      log_lease_taken
      return
    end

    begin
      lease_start_time = Time.current
      yield lease
    ensure
      Gitlab::Instrumentation::ExclusiveLock.add_hold_duration(Time.current - lease_start_time)
      release_lease(lease) if lease_release?
    end
  end
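The `begin`/`ensure` wrapping is what guarantees the hold duration is recorded even when the yielded block raises. A minimal sketch of the same timing pattern, assuming a hypothetical `record` callable in place of the instrumentation class:

```ruby
# Minimal sketch: time how long a block holds a resource, recording the
# duration even when the block raises. `record` is a hypothetical stand-in
# for Gitlab::Instrumentation::ExclusiveLock.add_hold_duration.
def with_hold_timing(record)
  start_time = Time.now
  yield
ensure
  record.call(Time.now - start_time)
end

durations = []
recorder = ->(elapsed) { durations << elapsed }

with_hold_timing(recorder) { sleep 0.01 }

begin
  with_hold_timing(recorder) { raise 'boom' }
rescue RuntimeError
  # the duration was still recorded before the error propagated
end

puts durations.size # => 2
```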
@@ -7,7 +7,10 @@
- preview_url = preview_markdown_path(project, target_type: model.class.name)

.form-group
  = form.label :description, _('Description'), class: 'gl-display-block'
  .gl-display-flex
    = form.label :description, _('Description'), class: 'gl-display-block'
    - if model.is_a?(MergeRequest)
      = render_if_exists "/shared/form_elements/summarize_merge_request"
  - if model.is_a?(MergeRequest)
    = hidden_field_tag :merge_request_diff_head_sha, model.diff_head_sha
@@ -0,0 +1,9 @@
---
name: cached_conflicts_with_types
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/439695
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/145107
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/442303
milestone: '16.10'
group: group::code review
type: gitlab_com_derisk
default_enabled: false

@@ -1,9 +0,0 @@
---
name: ci_partitioning_use_ci_pipeline_variables_routing_table
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/439069
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/143334
rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/production/-/issues/17508
milestone: '16.10'
group: group::pipeline execution
type: gitlab_com_derisk
default_enabled: false
@@ -9,6 +9,7 @@ Gitlab::Database::Partitioning.register_models(
  Ci::RunnerManagerBuild,
  Ci::JobAnnotation,
  Ci::BuildMetadata,
  Ci::PipelineVariable,
  CommitStatus,
  BatchedGitRefUpdates::Deletion,
  Users::ProjectVisit,
@@ -7,4 +7,20 @@ feature_categories:
description: Keeps connection between user and a merge request approval rule
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/8497
milestone: '11.7'
gitlab_schema: gitlab_main
gitlab_schema: gitlab_main_cell
allow_cross_joins:
  - gitlab_main_clusterwide
allow_cross_transactions:
  - gitlab_main_clusterwide
allow_cross_foreign_keys:
  - gitlab_main_clusterwide
desired_sharding_key:
  project_id:
    references: projects
    backfill_via:
      parent:
        foreign_key: approval_merge_request_rule_id
        table: approval_merge_request_rules
        sharding_key: project_id
        belongs_to: approval_merge_request_rule
awaiting_backfill_on_parent: true
@ -1,5 +1,7 @@
|
|||
---
|
||||
table_name: catalog_resource_components
|
||||
classes:
|
||||
- Ci::Catalog::Resources::Component
|
||||
feature_categories:
|
||||
- pipeline_composition
|
||||
description: CI component available in the CI Catalog
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
---
|
||||
table_name: catalog_resource_versions
|
||||
classes:
|
||||
- Ci::Catalog::Resources::Version
|
||||
feature_categories:
|
||||
- pipeline_composition
|
||||
description: Catalog resource versions that contain valid CI components.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
---
|
||||
table_name: catalog_resources
|
||||
classes:
|
||||
- Ci::Catalog::Resource
|
||||
feature_categories:
|
||||
- pipeline_composition
|
||||
description: Projects containing a catalog resource.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
---
|
||||
table_name: catalog_verified_namespaces
|
||||
classes:
|
||||
- Ci::Catalog::VerifiedNamespace
|
||||
feature_categories:
|
||||
- pipeline_composition
|
||||
description: Verified namespaces in the CI catalog.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
---
|
||||
table_name: cloud_connector_access
|
||||
classes:
|
||||
- CloudConnector::Access
|
||||
feature_categories:
|
||||
- cloud_connector
|
||||
description: Information about Cloud Connector features
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@
|
|||
table_name: p_ci_pipeline_variables
|
||||
classes:
|
||||
- Ci::PipelineVariable
|
||||
- Ci::PipelineVariable::Partitioned
|
||||
feature_categories:
|
||||
- continuous_integration
|
||||
description: Routing table for ci_pipeline_variables
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,25 @@
# frozen_string_literal: true

class DropTmpFindingIndexFromVulnerabilities < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!

  milestone '16.10'

  INDEX_NAME = "tmp_index_vulnerabilities_on_id_finding_id_empty"

  def up
    remove_concurrent_index_by_name(
      :vulnerabilities,
      INDEX_NAME
    )
  end

  def down
    add_concurrent_index(
      :vulnerabilities,
      :id,
      where: "finding_id IS NULL",
      name: INDEX_NAME
    )
  end
end

@@ -0,0 +1,21 @@
# frozen_string_literal: true

class UnscheduleStuckBulkImportCron < Gitlab::Database::Migration[2.2]
  milestone '16.10'
  disable_ddl_transaction!

  def up
    # This is to clean up the cron schedule for BulkImports::StuckImportWorker
    # which was removed in
    # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/143806
    removed_job = Sidekiq::Cron::Job.find('bulk_imports_stuck_import_worker')
    removed_job.destroy if removed_job

    sidekiq_remove_jobs(job_klasses: %w[BulkImports::StuckImportWorker])
  end

  def down
    # This is to remove the cron schedule for a deleted job, so there is no
    # meaningful way to reverse it.
  end
end
@@ -0,0 +1 @@
621a8145d831dbc652e5b20b16f15ff0490e8f36126813ee14e350a3aee4e3dd

@@ -0,0 +1 @@
d6065732e2ec2f9dbdaa9adc16bda0cf38b52ca492605c2adf246abcaa7916b4

@@ -27508,8 +27508,6 @@ CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statisti
CREATE INDEX tmp_index_project_statistics_updated_at ON project_statistics USING btree (project_id, updated_at) WHERE (repository_size > 0);

CREATE INDEX tmp_index_vulnerabilities_on_id_finding_id_empty ON vulnerabilities USING btree (id) WHERE (finding_id IS NULL);

CREATE INDEX tmp_index_vulnerability_dismissal_info ON vulnerabilities USING btree (id) WHERE ((state = 2) AND ((dismissed_at IS NULL) OR (dismissed_by_id IS NULL)));

CREATE INDEX tmp_index_vulnerability_overlong_title_html ON vulnerabilities USING btree (id) WHERE (length(title_html) > 800);
@@ -1,5 +1,5 @@
---
stage: Govern
stage: Data Stores
group: Tenant Scale
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
@@ -32823,6 +32823,7 @@ Name of the feature that the callout is for.
| <a id="usercalloutfeaturenameenumsubmit_license_usage_data_banner"></a>`SUBMIT_LICENSE_USAGE_DATA_BANNER` | Callout feature name for submit_license_usage_data_banner. |
| <a id="usercalloutfeaturenameenumsuggest_pipeline"></a>`SUGGEST_PIPELINE` | Callout feature name for suggest_pipeline. |
| <a id="usercalloutfeaturenameenumsuggest_popover_dismissed"></a>`SUGGEST_POPOVER_DISMISSED` | Callout feature name for suggest_popover_dismissed. |
| <a id="usercalloutfeaturenameenumsummarize_code_changes"></a>`SUMMARIZE_CODE_CHANGES` | Callout feature name for summarize_code_changes. |
| <a id="usercalloutfeaturenameenumtabs_position_highlight"></a>`TABS_POSITION_HIGHLIGHT` | Callout feature name for tabs_position_highlight. |
| <a id="usercalloutfeaturenameenumterraform_notification_dismissed"></a>`TERRAFORM_NOTIFICATION_DISMISSED` | Callout feature name for terraform_notification_dismissed. |
| <a id="usercalloutfeaturenameenumthreat_monitoring_info"></a>`THREAT_MONITORING_INFO` | Callout feature name for threat_monitoring_info. |
@@ -199,7 +199,9 @@ You can explore the GraphQL API resources with the interactive [GraphQL explorer
Deployment frequency is calculated based on the deployments record, which is created for typical push-based deployments.
These deployment records are not created for pull-based deployments, for example when Container Images are connected to GitLab with an agent.

To track DORA metrics in these cases, you can [create a deployment record](../../api/deployments.md#create-a-deployment) using the Deployments API. For more information, see [Track deployments of an external deployment tool](../../ci/environments/external_deployment_tools.md).
To track DORA metrics in these cases, you can [create a deployment record](../../api/deployments.md#create-a-deployment) using the Deployments API.
You must set the environment name where the deployment tier is configured, because the tier variable is specified for the given environment, not for the deployments.
For more information, see [Track deployments of an external deployment tool](../../ci/environments/external_deployment_tools.md).
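As a rough, hypothetical illustration of that workflow (project ID, token handling, and field values are placeholders; check the Deployments API reference for the authoritative parameter list), an external tool could record such a deployment like this:

```ruby
# Sketch: record an external deployment via the GitLab Deployments API so
# DORA metrics can pick it up. All values below are illustrative placeholders.
require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/123/deployments')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')
request['Content-Type'] = 'application/json'
request.body = {
  environment: 'production',   # must match the environment where the tier is configured
  sha: 'a1b2c3d4',             # commit that was deployed
  ref: 'main',
  tag: false,
  status: 'success'
}.to_json

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
  http.request(request)
end
puts response.code
```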
### Measure DORA metrics with Jira
@@ -50,6 +50,17 @@ they are signed.
You can also [use the Commits API](../../../../api/commits.md#get-gpg-signature-of-a-commit)
to check a commit's signature.

### Verify commits made in the web UI

GitLab signs commits created in the web UI with SSH.
To verify these commits locally, [follow the steps for SSH](ssh.md#verify-commits-locally)
and add the following public key to the `allowed_signers` file:
`ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIADOCCUoN3Q1UPQqUvp845fKy7haJH17qsSkVXzWXilW`.

```plaintext
noreply@gitlab.com namespaces="git" ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIADOCCUoN3Q1UPQqUvp845fKy7haJH17qsSkVXzWXilW
```

## Troubleshooting

### Fix verification problems with signed commits
@@ -1,5 +1,5 @@
variables:
  DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.80.1'
  DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.83.0'

.dast-auto-deploy:
  image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}"

@@ -1,5 +1,5 @@
variables:
  AUTO_DEPLOY_IMAGE_VERSION: 'v2.80.1'
  AUTO_DEPLOY_IMAGE_VERSION: 'v2.83.0'

.auto-deploy:
  image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

@@ -1,5 +1,5 @@
variables:
  AUTO_DEPLOY_IMAGE_VERSION: 'v2.80.1'
  AUTO_DEPLOY_IMAGE_VERSION: 'v2.83.0'

.auto-deploy:
  image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"
@@ -227,7 +227,7 @@ module Gitlab
          new_path: our_path)
      end

      def conflict_type(diff_file)
      def conflict_type(when_renamed: false)
        if ancestor_path.present?
          if our_path.present? && their_path.present?
            :both_modified

@@ -239,7 +239,7 @@ module Gitlab
        elsif our_path.present? && their_path.present?
          :both_added
        elsif their_path.blank?
          diff_file.renamed_file? ? :renamed_same_file : :removed_target_renamed_source
          when_renamed ? :renamed_same_file : :removed_target_renamed_source
        else
          :removed_source_renamed_target
        end
@@ -31,13 +31,32 @@ module Gitlab
    def in_lock(key, ttl: 1.minute, retries: 10, sleep_sec: 0.01.seconds)
      raise ArgumentError, 'Key needs to be specified' unless key

      Gitlab::Instrumentation::ExclusiveLock.increment_requested_count

      lease = SleepingLock.new(key, timeout: ttl, delay: sleep_sec)

      lease.obtain(1 + retries)
      with_instrumentation(:wait) do
        lease.obtain(1 + retries)
      end

      yield(lease.retried?, lease)
      with_instrumentation(:hold) do
        yield(lease.retried?, lease)
      end
    ensure
      lease&.cancel
    end

    private

    def with_instrumentation(metric)
      start_time = Time.current
      yield
    ensure
      if metric == :wait
        Gitlab::Instrumentation::ExclusiveLock.add_wait_duration(Time.current - start_time)
      else
        Gitlab::Instrumentation::ExclusiveLock.add_hold_duration(Time.current - start_time)
      end
    end
  end
end
@@ -0,0 +1,54 @@
# frozen_string_literal: true

module Gitlab
  module Instrumentation
    class ExclusiveLock
      EXCLUSIVE_LOCK_REQUESTED_COUNT = :exclusive_lock_requested_count
      EXCLUSIVE_LOCK_WAIT_DURATION = :exclusive_lock_wait_duration_s
      EXCLUSIVE_LOCK_HOLD_DURATION = :exclusive_lock_hold_duration_s

      class << self
        def requested_count
          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_REQUESTED_COUNT] || 0
        end

        def increment_requested_count
          return unless Gitlab::SafeRequestStore.active?

          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_REQUESTED_COUNT] ||= 0
          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_REQUESTED_COUNT] += 1
        end

        def wait_duration
          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_WAIT_DURATION] || 0
        end

        def add_wait_duration(duration)
          return unless Gitlab::SafeRequestStore.active?

          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_WAIT_DURATION] ||= 0
          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_WAIT_DURATION] += duration
        end

        def hold_duration
          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_HOLD_DURATION] || 0
        end

        def add_hold_duration(duration)
          return unless Gitlab::SafeRequestStore.active?

          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_HOLD_DURATION] ||= 0
          ::Gitlab::SafeRequestStore[EXCLUSIVE_LOCK_HOLD_DURATION] += duration
        end
      end

      def self.payload
        {
          EXCLUSIVE_LOCK_REQUESTED_COUNT => requested_count,
          EXCLUSIVE_LOCK_WAIT_DURATION => wait_duration,
          EXCLUSIVE_LOCK_HOLD_DURATION => hold_duration
        }
      end
    end
  end
end
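Per-request aggregation like this means a single structured log line can report how many exclusive locks a request asked for and how long it spent waiting for and holding them. A rough sketch of the same idea, assuming a plain hash as the request-scoped store instead of `Gitlab::SafeRequestStore`:

```ruby
# Hypothetical request-scoped store; in GitLab this role is played by SafeRequestStore.
store = Hash.new(0)

increment = -> { store[:exclusive_lock_requested_count] += 1 }
add_wait  = ->(duration) { store[:exclusive_lock_wait_duration_s] += duration }
add_hold  = ->(duration) { store[:exclusive_lock_hold_duration_s] += duration }

# Two locks taken during one request:
increment.call
add_wait.call(0.002)
add_hold.call(0.040)

increment.call
add_wait.call(0.001)
add_hold.call(0.015)

# Payload that would be merged into the request's log line:
payload = {
  exclusive_lock_requested_count: store[:exclusive_lock_requested_count],
  exclusive_lock_wait_duration_s: store[:exclusive_lock_wait_duration_s].round(3),
  exclusive_lock_hold_duration_s: store[:exclusive_lock_hold_duration_s].round(3)
}
puts payload.inspect
```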
@@ -28,6 +28,7 @@ module Gitlab
        instrument_rate_limiting_gates(payload)
        instrument_global_search_api(payload)
        instrument_ldap(payload)
        instrument_exclusive_lock(payload)
      end

      def instrument_gitaly(payload)

@@ -139,6 +140,14 @@ module Gitlab
        payload.merge! Gitlab::Metrics::Subscribers::Ldap.payload
      end

      def instrument_exclusive_lock(payload)
        requested_count = Gitlab::Instrumentation::ExclusiveLock.requested_count

        return if requested_count == 0

        payload.merge!(Gitlab::Instrumentation::ExclusiveLock.payload)
      end

      # Returns the queuing duration for a Sidekiq job in seconds, as a float, if the
      # `enqueued_at` field or `created_at` field is available.
      #
@ -15802,9 +15802,6 @@ msgstr ""
|
|||
msgid "DastProfiles|Authentication URL"
|
||||
msgstr ""
|
||||
|
||||
msgid "DastProfiles|Branch missing"
|
||||
msgstr ""
|
||||
|
||||
msgid "DastProfiles|Change scanner profile"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -16000,9 +15997,6 @@ msgstr ""
|
|||
msgid "DastProfiles|Select a site profile to run a DAST scan"
|
||||
msgstr ""
|
||||
|
||||
msgid "DastProfiles|Select branch"
|
||||
msgstr ""
|
||||
|
||||
msgid "DastProfiles|Select scanner profile"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -23374,28 +23368,28 @@ msgstr ""
|
|||
msgid "GoogleCloudPlatformService|Google Cloud project number for the Workload Identity Federation."
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|ID of the Google Cloud project."
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|ID of the Workload Identity Pool provider."
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|ID of the Workload Identity Pool."
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|Location of Artifact Registry repository"
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|Location of Artifact Registry repository."
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|Project number of the Workload Identity Pool."
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|Repository of Artifact Registry"
|
||||
msgid "GoogleCloudPlatformService|Project with the Artifact Registry repository."
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|Repository of Artifact Registry."
|
||||
msgid "GoogleCloudPlatformService|Repository location"
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|Repository must be Docker format and Standard mode."
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|Repository name"
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|To improve security, use a dedicated project for resources, separate from CI/CD and identity management projects. %{link_start}Where’s my project ID? %{icon}%{link_end}"
|
||||
msgstr ""
|
||||
|
||||
msgid "GoogleCloudPlatformService|Workload Identity Pool ID"
|
||||
|
|
@ -26665,6 +26659,9 @@ msgstr ""
|
|||
msgid "Introducing Your DevOps Reports"
|
||||
msgstr ""
|
||||
|
||||
msgid "Introducing: Summarize code changes"
|
||||
msgstr ""
|
||||
|
||||
msgid "Invalid 'schemaVersion' '%{schema_version}'"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -45722,6 +45719,9 @@ msgstr ""
|
|||
msgid "SecurityTxt|When present, this will be publicly available at %{codeOpen}https://gitlab.example.com/.well-known/security.txt%{codeClose}. Maximum 2048 characters."
|
||||
msgstr ""
|
||||
|
||||
msgid "See an AI-generated summary of your code changes."
|
||||
msgstr ""
|
||||
|
||||
msgid "See example DevOps Score page in our documentation."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -48741,6 +48741,9 @@ msgstr ""
|
|||
msgid "Suite"
|
||||
msgstr ""
|
||||
|
||||
msgid "Summarize code changes"
|
||||
msgstr ""
|
||||
|
||||
msgid "Summarize comments"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,60 @@
# frozen_string_literal: true

require_relative '../../migration_helpers'

module RuboCop
  module Cop
    module Rails
      # Checks to ensure timestamp used for a migration file occurs in the past.
      # If it is not in the past, this can cause Rails to enumerate the file names until that date has passed.
      # For instance if the migration below exists:
      #   30000220000000_some_migration.rb
      # It will cause rails generator to create new migrations as an enumeration on that timestamp until 30000220 has
      # passed. We would be generating files like this:
      #   30000220000001_some_migration.rb
      #   30000220000002_some_migration.rb
      # That methodology increases the probability of collision with others contributing at the same time
      # as each file is merely enumerated by 1.
      #
      # @example
      #   # bad - date is in the future
      #   30000220000000_some_migration.rb
      #
      #   # good - date is in the past
      #   20240219000000_some_migration.rb
      #
      class MigrationTimestamp < RuboCop::Cop::Base
        include RangeHelp

        MSG = 'The date of this file (`%<basename>s`) must not be in the future.'

        def on_new_investigation
          file_path = processed_source.file_path
          basename = File.basename(file_path)

          return unless date_named_file?(basename)

          for_bad_filename(basename) { |range, msg| add_offense(range, message: msg) }
        end

        private

        DATE_LENGTH = 14
        STARTS_WITH_DATE_REGEX = /^\d{#{DATE_LENGTH}}_/

        def date_named_file?(basename)
          basename.match?(STARTS_WITH_DATE_REGEX)
        end

        def for_bad_filename(basename)
          # match ActiveRecord https://api.rubyonrails.org/classes/ActiveRecord/Migration.html
          now = Time.now.utc.strftime('%Y%m%d%H%M%S') # length is 14

          return if basename.first(DATE_LENGTH) <= now

          yield source_range(processed_source.buffer, 1, 0), format(MSG, basename: basename)
        end
      end
    end
  end
end
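The check itself reduces to a lexicographic comparison of the 14-digit timestamp prefix against the current UTC time, which works because both strings are fixed-width `YYYYMMDDHHMMSS`. A quick plain-Ruby sketch of that comparison, using `String#[]` in place of ActiveSupport's `String#first`:

```ruby
# Fixed-width YYYYMMDDHHMMSS strings compare correctly as plain strings.
now = Time.now.utc.strftime('%Y%m%d%H%M%S')

['20240219000000_some_migration.rb',   # in the past: fine
 '30000220000000_some_migration.rb'].each do |basename|
  timestamp = basename[0, 14]
  flagged = timestamp > now
  puts "#{basename}: #{flagged ? 'offense (future date)' : 'ok'}"
end
```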
@ -1,25 +1,20 @@
|
|||
import Vue, { nextTick } from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import { GlLoadingIcon, GlEmptyState, GlAlert } from '@gitlab/ui';
|
||||
import { nextTick } from 'vue';
|
||||
import { GlEmptyState, GlAlert } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import KubernetesOverview from '~/environments/environment_details/components/kubernetes/kubernetes_overview.vue';
|
||||
import KubernetesStatusBar from '~/environments/environment_details/components/kubernetes/kubernetes_status_bar.vue';
|
||||
import KubernetesAgentInfo from '~/environments/environment_details/components/kubernetes/kubernetes_agent_info.vue';
|
||||
import KubernetesTabs from '~/environments/environment_details/components/kubernetes/kubernetes_tabs.vue';
|
||||
import environmentClusterAgentQuery from '~/environments/graphql/queries/environment_cluster_agent.query.graphql';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { agent, kubernetesNamespace, fluxResourcePathMock } from '../../../graphql/mock_data';
|
||||
import { mockKasTunnelUrl } from '../../../mock_data';
|
||||
|
||||
describe('~/environments/environment_details/components/kubernetes/kubernetes_overview.vue', () => {
|
||||
Vue.use(VueApollo);
|
||||
|
||||
let wrapper;
|
||||
|
||||
const propsData = {
|
||||
const defaultProps = {
|
||||
environmentName: 'production',
|
||||
projectFullPath: 'gitlab-group/test-project',
|
||||
kubernetesNamespace,
|
||||
fluxResourcePath: fluxResourcePathMock,
|
||||
};
|
||||
|
||||
const provide = {
|
||||
|
|
@ -37,31 +32,15 @@ describe('~/environments/environment_details/components/kubernetes/kubernetes_ov
|
|||
};
|
||||
|
||||
const createWrapper = (clusterAgent = agent) => {
|
||||
const defaultEnvironmentData = {
|
||||
data: {
|
||||
project: {
|
||||
id: '1',
|
||||
environment: {
|
||||
id: '1',
|
||||
clusterAgent,
|
||||
kubernetesNamespace,
|
||||
fluxResourcePath: fluxResourcePathMock,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
const mockApollo = createMockApollo([
|
||||
[environmentClusterAgentQuery, jest.fn().mockResolvedValue(defaultEnvironmentData)],
|
||||
]);
|
||||
|
||||
return shallowMount(KubernetesOverview, {
|
||||
apolloProvider: mockApollo,
|
||||
provide,
|
||||
propsData,
|
||||
propsData: {
|
||||
...defaultProps,
|
||||
clusterAgent,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
|
||||
const findAgentInfo = () => wrapper.findComponent(KubernetesAgentInfo);
|
||||
const findKubernetesStatusBar = () => wrapper.findComponent(KubernetesStatusBar);
|
||||
const findKubernetesTabs = () => wrapper.findComponent(KubernetesTabs);
|
||||
|
|
@ -69,13 +48,88 @@ describe('~/environments/environment_details/components/kubernetes/kubernetes_ov
|
|||
|
||||
const findAlert = () => wrapper.findComponent(GlAlert);
|
||||
|
||||
describe('when fetching data', () => {
|
||||
describe('when the agent data is present', () => {
|
||||
beforeEach(() => {
|
||||
wrapper = createWrapper();
|
||||
});
|
||||
|
||||
it('renders loading indicator', () => {
|
||||
expect(findLoadingIcon().exists()).toBe(true);
|
||||
it('renders kubernetes agent info', () => {
|
||||
expect(findAgentInfo().props('clusterAgent')).toEqual(agent);
|
||||
});
|
||||
|
||||
it('renders kubernetes tabs', () => {
|
||||
expect(findKubernetesTabs().props()).toEqual({
|
||||
namespace: kubernetesNamespace,
|
||||
configuration,
|
||||
});
|
||||
});
|
||||
|
||||
it('renders kubernetes status bar', () => {
|
||||
expect(findKubernetesStatusBar().props()).toEqual({
|
||||
clusterHealthStatus: 'success',
|
||||
configuration,
|
||||
environmentName: defaultProps.environmentName,
|
||||
fluxResourcePath: fluxResourcePathMock,
|
||||
});
|
||||
});
|
||||
|
||||
describe('Kubernetes health status', () => {
|
||||
it("doesn't set `clusterHealthStatus` when pods are still loading", async () => {
|
||||
findKubernetesTabs().vm.$emit('loading', true);
|
||||
await nextTick();
|
||||
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('');
|
||||
});
|
||||
|
||||
it('sets `clusterHealthStatus` as error when pods emitted a failure', async () => {
|
||||
findKubernetesTabs().vm.$emit('update-failed-state', { pods: true });
|
||||
await nextTick();
|
||||
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
|
||||
});
|
||||
|
||||
it('sets `clusterHealthStatus` as success when data is loaded and no failures where emitted', () => {
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('success');
|
||||
});
|
||||
|
||||
it('sets `clusterHealthStatus` as success after state update if there are no failures', async () => {
|
||||
findKubernetesTabs().vm.$emit('update-failed-state', { pods: true });
|
||||
await nextTick();
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
|
||||
|
||||
findKubernetesTabs().vm.$emit('update-failed-state', { pods: false });
|
||||
await nextTick();
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('success');
|
||||
});
|
||||
});
|
||||
|
||||
describe('on cluster error', () => {
|
||||
beforeEach(() => {
|
||||
wrapper = createWrapper();
|
||||
});
|
||||
|
||||
it('shows alert with the error message', async () => {
|
||||
const error = 'Error message from pods';
|
||||
|
||||
findKubernetesTabs().vm.$emit('cluster-error', error);
|
||||
await nextTick();
|
||||
|
||||
expect(findAlert().text()).toBe(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when there is no cluster agent data', () => {
|
||||
beforeEach(() => {
|
||||
wrapper = createWrapper(null);
|
||||
});
|
||||
|
||||
it('renders empty state component', () => {
|
||||
expect(findEmptyState().props()).toMatchObject({
|
||||
title: 'No Kubernetes clusters configured',
|
||||
primaryButtonText: 'Get started',
|
||||
primaryButtonLink: '/help/ci/environments/kubernetes_dashboard',
|
||||
});
|
||||
});
|
||||
|
||||
it("doesn't render Kubernetes related components", () => {
|
||||
|
|
@ -83,117 +137,5 @@ describe('~/environments/environment_details/components/kubernetes/kubernetes_ov
|
|||
expect(findKubernetesStatusBar().exists()).toBe(false);
|
||||
expect(findKubernetesTabs().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it("doesn't render empty state", () => {
|
||||
expect(findEmptyState().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when data is fetched', () => {
|
||||
it('hides loading indicator', async () => {
|
||||
wrapper = createWrapper();
|
||||
await waitForPromises();
|
||||
|
||||
expect(findLoadingIcon().exists()).toBe(false);
|
||||
});
|
||||
|
||||
describe('and there is cluster agent data', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createWrapper();
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('renders kubernetes agent info', () => {
|
||||
expect(findAgentInfo().props('clusterAgent')).toEqual(agent);
|
||||
});
|
||||
|
||||
it('renders kubernetes tabs', () => {
|
||||
expect(findKubernetesTabs().props()).toEqual({
|
||||
namespace: kubernetesNamespace,
|
||||
configuration,
|
||||
});
|
||||
});
|
||||
|
||||
it('renders kubernetes status bar', () => {
|
||||
expect(findKubernetesStatusBar().props()).toEqual({
|
||||
clusterHealthStatus: 'success',
|
||||
configuration,
|
||||
environmentName: propsData.environmentName,
|
||||
fluxResourcePath: fluxResourcePathMock,
|
||||
});
|
||||
});
|
||||
|
||||
describe('Kubernetes health status', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createWrapper();
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it("doesn't set `clusterHealthStatus` when pods are still loading", async () => {
|
||||
findKubernetesTabs().vm.$emit('loading', true);
|
||||
await nextTick();
|
||||
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('');
|
||||
});
|
||||
|
||||
it('sets `clusterHealthStatus` as error when pods emitted a failure', async () => {
|
||||
findKubernetesTabs().vm.$emit('update-failed-state', { pods: true });
|
||||
await nextTick();
|
||||
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
|
||||
});
|
||||
|
||||
it('sets `clusterHealthStatus` as success when data is loaded and no failures where emitted', () => {
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('success');
|
||||
});
|
||||
|
||||
it('sets `clusterHealthStatus` as success after state update if there are no failures', async () => {
|
||||
findKubernetesTabs().vm.$emit('update-failed-state', { pods: true });
|
||||
await nextTick();
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
|
||||
|
||||
findKubernetesTabs().vm.$emit('update-failed-state', { pods: false });
|
||||
await nextTick();
|
||||
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('success');
|
||||
});
|
||||
});
|
||||
|
||||
describe('on cluster error', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createWrapper();
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('shows alert with the error message', async () => {
|
||||
const error = 'Error message from pods';
|
||||
|
||||
findKubernetesTabs().vm.$emit('cluster-error', error);
|
||||
await nextTick();
|
||||
|
||||
expect(findAlert().text()).toBe(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('and there is no cluster agent data', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createWrapper(null);
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('renders empty state component', () => {
|
||||
expect(findEmptyState().props()).toMatchObject({
|
||||
title: 'No Kubernetes clusters configured',
|
||||
primaryButtonText: 'Get started',
|
||||
primaryButtonLink: '/help/ci/environments/kubernetes_dashboard',
|
||||
});
|
||||
});
|
||||
|
||||
it("doesn't render Kubernetes related components", () => {
|
||||
expect(findAgentInfo().exists()).toBe(false);
|
||||
expect(findKubernetesStatusBar().exists()).toBe(false);
|
||||
expect(findKubernetesTabs().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,20 +1,48 @@
|
|||
import { GlTabs, GlTab } from '@gitlab/ui';
|
||||
import { GlLoadingIcon, GlTabs, GlTab } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import { nextTick } from 'vue';
|
||||
import Vue, { nextTick } from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import { updateHistory, getParameterValues, setUrlParams } from '~/lib/utils/url_utility';
|
||||
import EnvironmentsDetailPage from '~/environments/environment_details/index.vue';
|
||||
import DeploymentsHistory from '~/environments/environment_details/components/deployment_history.vue';
|
||||
import KubernetesOverview from '~/environments/environment_details/components/kubernetes/kubernetes_overview.vue';
|
||||
import environmentClusterAgentQuery from '~/environments/graphql/queries/environment_cluster_agent.query.graphql';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { agent, kubernetesNamespace, fluxResourcePathMock } from '../graphql/mock_data';
|
||||
|
||||
const projectFullPath = 'gitlab-group/test-project';
|
||||
const environmentName = 'test-environment-name';
|
||||
const after = 'after';
|
||||
const before = null;
|
||||
|
||||
jest.mock('~/lib/utils/url_utility');
|
||||
|
||||
describe('~/environments/environment_details/index.vue', () => {
|
||||
Vue.use(VueApollo);
|
||||
|
||||
let wrapper;
|
||||
|
||||
const createWrapper = () => {
|
||||
const createWrapper = (clusterAgent = agent) => {
|
||||
const defaultEnvironmentData = {
|
||||
data: {
|
||||
project: {
|
||||
id: '1',
|
||||
environment: {
|
||||
id: '1',
|
||||
clusterAgent,
|
||||
kubernetesNamespace,
|
||||
fluxResourcePath: fluxResourcePathMock,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
const mockApollo = createMockApollo([
|
||||
[environmentClusterAgentQuery, jest.fn().mockResolvedValue(defaultEnvironmentData)],
|
||||
]);
|
||||
|
||||
return shallowMount(EnvironmentsDetailPage, {
|
||||
apolloProvider: mockApollo,
|
||||
propsData: {
|
||||
projectFullPath,
|
||||
environmentName,
|
||||
|
|
@ -25,34 +53,62 @@ describe('~/environments/environment_details/index.vue', () => {
|
|||
});
|
||||
};
|
||||
|
||||
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
|
||||
const findTabs = () => wrapper.findComponent(GlTabs);
|
||||
const findAllTabs = () => wrapper.findAllComponents(GlTab);
|
||||
const findTabByIndex = (index) => findAllTabs().at(index);
|
||||
const findDeploymentHistory = () => wrapper.findComponent(DeploymentsHistory);
|
||||
const findKubernetesOverview = () => wrapper.findComponent(KubernetesOverview);
|
||||
|
||||
beforeEach(() => {
|
||||
wrapper = createWrapper();
|
||||
describe('loading state', () => {
|
||||
beforeEach(() => {
|
||||
wrapper = createWrapper();
|
||||
});
|
||||
|
||||
it('renders loading indicator', () => {
|
||||
expect(findLoadingIcon().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it("doesn't render tabs", () => {
|
||||
expect(findTabs().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('hides loading indicator when the data is loaded', async () => {
|
||||
await waitForPromises();
|
||||
|
||||
expect(findLoadingIcon().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
it('renders tabs component with the correct props', () => {
|
||||
expect(findTabs().props('syncActiveTabWithQueryParams')).toBe(true);
|
||||
});
|
||||
describe('tabs', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createWrapper();
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('sets proper CSS class to the active tab', () => {
|
||||
expect(findTabByIndex(0).props('titleLinkClass')).toBe('gl-inset-border-b-2-theme-accent');
|
||||
expect(findTabByIndex(1).props('titleLinkClass')).toBe('');
|
||||
});
|
||||
it('renders tabs component with the correct prop', () => {
|
||||
expect(findTabs().props('syncActiveTabWithQueryParams')).toBe(true);
|
||||
});
|
||||
|
||||
it('updates the CSS class when the active tab changes', async () => {
|
||||
findTabs().vm.$emit('input', 1);
|
||||
await nextTick();
|
||||
it('sets proper CSS class to the active tab', () => {
|
||||
expect(findTabByIndex(0).props('titleLinkClass')).toBe('gl-inset-border-b-2-theme-accent');
|
||||
expect(findTabByIndex(1).props('titleLinkClass')).toBe('');
|
||||
});
|
||||
|
||||
expect(findTabByIndex(0).props('titleLinkClass')).toBe('');
|
||||
expect(findTabByIndex(1).props('titleLinkClass')).toBe('gl-inset-border-b-2-theme-accent');
|
||||
it('updates the CSS class when the active tab changes', async () => {
|
||||
findTabs().vm.$emit('input', 1);
|
||||
await nextTick();
|
||||
|
||||
expect(findTabByIndex(0).props('titleLinkClass')).toBe('');
|
||||
expect(findTabByIndex(1).props('titleLinkClass')).toBe('gl-inset-border-b-2-theme-accent');
|
||||
});
|
||||
});
|
||||
|
||||
describe('kubernetes overview tab', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createWrapper();
|
||||
await waitForPromises();
|
||||
});
|
||||
it('renders correct title', () => {
|
||||
expect(findTabByIndex(0).attributes('title')).toBe('Kubernetes overview');
|
||||
});
|
||||
|
|
@ -63,13 +119,20 @@ describe('~/environments/environment_details/index.vue', () => {
|
|||
|
||||
it('renders kubernetes_overview component with correct props', () => {
|
||||
expect(findKubernetesOverview().props()).toEqual({
|
||||
projectFullPath,
|
||||
environmentName,
|
||||
clusterAgent: agent,
|
||||
kubernetesNamespace,
|
||||
fluxResourcePath: fluxResourcePathMock,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('deployment history tab', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createWrapper();
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('renders correct title', () => {
|
||||
expect(findTabByIndex(1).attributes('title')).toBe('Deployment history');
|
||||
});
|
||||
|
|
@ -87,4 +150,35 @@ describe('~/environments/environment_details/index.vue', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when there is cluster agent data', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createWrapper();
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('shows the Kubernetes overview tab as active', () => {
|
||||
expect(findTabs().props('value')).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when there is no cluster agent data', () => {
|
||||
it('navigates to the Deployments history tab if the tab was not specified in the URL', async () => {
|
||||
getParameterValues.mockReturnValue([]);
|
||||
wrapper = createWrapper(null);
|
||||
await waitForPromises();
|
||||
|
||||
expect(setUrlParams).toHaveBeenCalledWith({ tab: 'deployment-history' });
|
||||
expect(updateHistory).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("doesn't navigate to the Deployments history tab if the tab was specified in the URL", async () => {
|
||||
getParameterValues.mockReturnValue([{ tab: 'kubernetes-overview' }]);
|
||||
wrapper = createWrapper(null);
|
||||
await waitForPromises();
|
||||
|
||||
expect(setUrlParams).not.toHaveBeenCalled();
|
||||
expect(updateHistory).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -236,6 +236,16 @@ describe('DynamicField', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('with label description', () => {
|
||||
it('renders label description', () => {
|
||||
createComponent({
|
||||
labelDescription: 'This is a description',
|
||||
});
|
||||
|
||||
expect(findGlFormGroup().props('labelDescription')).toBe('This is a description');
|
||||
});
|
||||
});
|
||||
|
||||
describe('password field validations', () => {
|
||||
describe('without value', () => {
|
||||
it('requires validation', () => {
|
||||
|
|
|
|||
|
|
@@ -347,6 +347,36 @@ describe('Global Search Store Getters', () => {
});
});

describe('scopedSearchGroup', () => {
beforeEach(() => {
createState();
});

it('returns the correct name', () => {
state.search = 'pie';

expect(getters.scopedSearchGroup(state, {}).name).toStrictEqual('Search for `pie` in...');

state.commandChar = '@';
expect(getters.scopedSearchGroup(state, {}).name).toStrictEqual(
'Search for `pie` users in...',
);
});

it('does not escape name', () => {
state.search = '<pie`>#$%';

expect(getters.scopedSearchGroup(state, {}).name).toStrictEqual(
'Search for `<pie`>#$%` in...',
);

state.commandChar = '>';
expect(getters.scopedSearchGroup(state, {}).name).toStrictEqual(
'Search for `<pie`>#$%` pages in...',
);
});
});

describe('autocompleteGroupedSearchOptions', () => {
beforeEach(() => {
createState();

@@ -684,4 +684,147 @@ RSpec.describe DiffHelper, feature_category: :code_review_workflow do
end
end
end

describe '#conflicts_with_types', :use_clean_rails_redis_caching do
let(:merge_request) do
create(
:merge_request,
:conflict,
merge_status: 'cannot_be_merged',
source_branch_sha: 'abc123',
target_branch_sha: 'def456'
)
end

let(:exception) { nil }
let(:conflict_file) { instance_double(Gitlab::Conflict::File, path: 'a') }
let(:files) { [conflict_file] }

before do
allow(helper).to receive(:merge_request).and_return(merge_request)

allow(conflict_file)
.to receive(:conflict_type)
.and_return(:removed_target_renamed_source)

allow(conflict_file)
.to receive(:conflict_type)
.with(when_renamed: true)
.and_return(:renamed_same_file)

allow_next_instance_of(MergeRequests::Conflicts::ListService, merge_request, allow_tree_conflicts: true) do |svc|
if exception.present?
allow(svc).to receive_message_chain(:conflicts, :files).and_raise(exception)
else
allow(svc).to receive_message_chain(:conflicts, :files).and_return(files)
end
end
end

it 'returns list of conflicts indexed by path' do
expect(helper.conflicts_with_types).to eq(
'a' => {
conflict_type: :removed_target_renamed_source,
conflict_type_when_renamed: :renamed_same_file
}
)
end

context 'when merge request can be merged' do
let(:merge_request) { create(:merge_request, merge_status: 'can_be_merged') }

it 'returns nil' do
expect(helper.conflicts_with_types).to be_nil
end
end

context 'when source branch does not exist' do
let(:merge_request) do
create(
:merge_request,
source_branch: 'i-do-no-exist',
merge_status: 'cannot_be_merged'
)
end

it 'returns nil' do
expect(helper.conflicts_with_types).to be_nil
end
end

context 'when target branch does not exist' do
let(:merge_request) do
create(
:merge_request,
target_branch: 'i-do-no-exist',
merge_status: 'cannot_be_merged'
)
end

it 'returns nil' do
expect(helper.conflicts_with_types).to be_nil
end
end

context 'when Gitlab::Git::Conflict::Resolver::ConflictSideMissing exception is raised' do
let(:exception) { Gitlab::Git::Conflict::Resolver::ConflictSideMissing }

it 'returns an empty hash' do
expect(helper.conflicts_with_types).to eq({})
end
end

context 'when cached' do
before do
helper.conflicts_with_types # Cache the result
end

it 'does not make a call to MergeRequests::Conflicts::ListService' do
expect(MergeRequests::Conflicts::ListService).not_to receive(:new)

expect(helper.conflicts_with_types).to eq(
'a' => {
conflict_type: :removed_target_renamed_source,
conflict_type_when_renamed: :renamed_same_file
}
)
end

context 'when cached_conflicts_with_types is disabled' do
before do
stub_feature_flags(cached_conflicts_with_types: false)
end

it 'still calls MergeRequests::Conflicts::ListService' do
expect(MergeRequests::Conflicts::ListService).to receive(:new)

helper.conflicts_with_types
end
end

context 'when source branch SHA changes' do
before do
allow(merge_request).to receive(:source_branch_sha).and_return('123abc')
end

it 'calls MergeRequests::Conflicts::ListService' do
expect(MergeRequests::Conflicts::ListService).to receive(:new)

helper.conflicts_with_types
end
end

context 'when target branch SHA changes' do
before do
allow(merge_request).to receive(:target_branch_sha).and_return('456def')
end

it 'calls MergeRequests::Conflicts::ListService' do
expect(MergeRequests::Conflicts::ListService).to receive(:new)

helper.conflicts_with_types
end
end
end
end
end

@@ -341,9 +341,8 @@ RSpec.describe Gitlab::Conflict::File do
let(:conflict) { { ancestor: { path: ancestor_path }, theirs: { path: their_path }, ours: { path: our_path } } }
let(:raw_conflict_file) { Gitlab::Git::Conflict::File.new(repository, our_commit, conflict, '') }
let(:diff_file) { double(renamed_file?: renamed_file?) }

subject(:conflict_type) { conflict_file.conflict_type(diff_file) }
subject(:conflict_type) { conflict_file.conflict_type(when_renamed: renamed_file?) }

where(:ancestor_path, :their_path, :our_path, :renamed_file?, :result) do
'/ancestor/path' | '/their/path' | '/our/path' | false | :both_modified

@@ -115,5 +115,38 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
end
end
end

describe 'instrumentation', :request_store do
let!(:lease) { stub_exclusive_lease_taken(unique_key) }

subject do
class_instance.in_lock(unique_key, sleep_sec: 0.1, retries: 3) do
sleep 0.1
end
end

it 'increments lock requested count and computes the duration waiting for the lock and holding the lock' do
expect(lease).to receive(:try_obtain).exactly(3).times.and_return(nil)
expect(lease).to receive(:try_obtain).once.and_return(unique_key)

subject

expect(Gitlab::Instrumentation::ExclusiveLock.requested_count).to eq(1)
expect(Gitlab::Instrumentation::ExclusiveLock.wait_duration).to be_between(0.3, 0.31)
expect(Gitlab::Instrumentation::ExclusiveLock.hold_duration).to be_between(0.1, 0.11)
end

context 'when exclusive lease is not obtained' do
it 'increments lock requested count and computes the duration waiting for the lock' do
expect(lease).to receive(:try_obtain).exactly(4).times.and_return(nil)

expect { subject }.to raise_error('Failed to obtain a lock')

expect(Gitlab::Instrumentation::ExclusiveLock.requested_count).to eq(1)
expect(Gitlab::Instrumentation::ExclusiveLock.wait_duration).to be_between(0.3, 0.31)
expect(Gitlab::Instrumentation::ExclusiveLock.hold_duration).to eq(0)
end
end
end
end
end

@@ -0,0 +1,82 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Instrumentation::ExclusiveLock, :request_store, feature_category: :scalability do
describe '.requested_count' do
it 'returns the value from Gitlab::SafeRequestStore' do
allow(Gitlab::SafeRequestStore).to receive(:[]).with(:exclusive_lock_requested_count).and_return(5)

expect(described_class.requested_count).to eq(5)
end

it 'returns 0 if value not set in Gitlab::SafeRequestStore' do
allow(Gitlab::SafeRequestStore).to receive(:[]).with(:exclusive_lock_requested_count).and_return(nil)

expect(described_class.requested_count).to eq(0)
end
end

describe '.increment_requested_count' do
it 'increments the lock count' do
expect { described_class.increment_requested_count }
.to change { described_class.requested_count }.from(0).to(1)
end
end

describe '.wait_duration' do
it 'returns the value from Gitlab::SafeRequestStore' do
allow(Gitlab::SafeRequestStore).to receive(:[]).with(:exclusive_lock_wait_duration_s).and_return(5)

expect(described_class.wait_duration).to eq(5)
end

it 'returns 0 if value not set in Gitlab::SafeRequestStore' do
allow(Gitlab::SafeRequestStore).to receive(:[]).with(:exclusive_lock_wait_duration_s).and_return(nil)

expect(described_class.wait_duration).to eq(0)
end
end

describe '.add_wait_duration' do
it 'increments the duration' do
expect { described_class.add_wait_duration(5) }
.to change { described_class.wait_duration }.from(0).to(5)
end
end

describe '.hold_duration' do
it 'returns the value from Gitlab::SafeRequestStore' do
allow(Gitlab::SafeRequestStore).to receive(:[]).with(:exclusive_lock_hold_duration_s).and_return(5)

expect(described_class.hold_duration).to eq(5)
end

it 'returns 0 if value not set in Gitlab::SafeRequestStore' do
allow(Gitlab::SafeRequestStore).to receive(:[]).with(:exclusive_lock_hold_duration_s).and_return(nil)

expect(described_class.hold_duration).to eq(0)
end
end

describe '.add_hold_duration' do
it 'increments the duration' do
expect { described_class.add_hold_duration(5) }
.to change { described_class.hold_duration }.from(0).to(5)
end
end

describe '.payload' do
it 'returns a hash with metrics' do
described_class.increment_requested_count
described_class.add_wait_duration(2)
described_class.add_hold_duration(3)

expect(described_class.payload).to eq({
exclusive_lock_requested_count: 1,
exclusive_lock_wait_duration_s: 2,
exclusive_lock_hold_duration_s: 3
})
end
end
end

@@ -260,6 +260,38 @@ RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cac
})
end
end

context 'when an exclusive lock is made' do
before do
Gitlab::SafeRequestStore[:exclusive_lock_requested_count] = 1
end

it 'adds exclusive lock data' do
subject

expect(payload).to include({
exclusive_lock_requested_count: 1,
exclusive_lock_wait_duration_s: 0,
exclusive_lock_hold_duration_s: 0
})
end
end

context 'when an exclusive lock is not made' do
before do
Gitlab::SafeRequestStore[:exclusive_lock_requested_count] = 0
end

it 'does not add exclusive lock data' do
subject

expect(payload.keys).not_to include(
:exclusive_lock_requested_count,
:exclusive_lock_wait_duration_s,
:exclusive_lock_hold_duration_s
)
end
end
end

describe '.queue_duration_for_job' do

@@ -38,26 +38,4 @@ RSpec.describe Ci::PipelineVariable, feature_category: :continuous_integration d
end
end
end

describe 'routing table switch' do
context 'with ff disabled' do
before do
stub_feature_flags(ci_partitioning_use_ci_pipeline_variables_routing_table: false)
end

it 'uses the legacy table' do
expect(described_class.table_name).to eq('ci_pipeline_variables')
end
end

context 'with ff enabled' do
before do
stub_feature_flags(ci_partitioning_use_ci_pipeline_variables_routing_table: true)
end

it 'uses the routing table' do
expect(described_class.table_name).to eq('p_ci_pipeline_variables')
end
end
end
end

@@ -0,0 +1,40 @@
# frozen_string_literal: true

require 'rubocop_spec_helper'
require_relative '../../../../rubocop/cop/rails/migration_timestamp'

RSpec.describe RuboCop::Cop::Rails::MigrationTimestamp, feature_category: :shared do
context 'with timestamp in file name in the future' do
it 'registers an offense' do
expect_offense(<<~RUBY, '/db/migrate/30000220000000_some_migration.rb')
print 1
^ The date of this file (`30000220000000_some_migration.rb`) must [...]
RUBY
end
end

context 'with an invalid date for the timestamp in file name in the future' do
it 'registers an offense' do
expect_offense(<<~RUBY, '/db/migrate/30002420000000_some_migration.rb')
print 1
^ The date of this file (`30002420000000_some_migration.rb`) must [...]
RUBY
end
end

context 'with timestamp in file name in the past' do
it 'does not register an offense' do
expect_no_offenses(<<~RUBY, '/db/migrate/19700101000000_some_migration.rb')
print 1
RUBY
end
end

context 'without timestamp in the file name' do
it 'does not register an offense' do
expect_no_offenses(<<~RUBY, '/db/migrate/some_migration.rb')
print 1
RUBY
end
end
end

@@ -100,12 +100,13 @@ RSpec.describe DiffsEntity
end

context 'when there are conflicts' do
let(:conflict_file) { double(path: diff_files.first.new_path, conflict_type: :both_modified) }
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }

before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
allow(entity).to receive(:conflicts_with_types).and_return({
diff_files.first.new_path => {
conflict_type: :both_modified,
conflict_type_when_renamed: :both_modified
}
})
end

it 'serializes diff files with conflicts' do

@@ -113,7 +114,7 @@ RSpec.describe DiffsEntity
.to receive(:represent)
.with(
diff_files,
hash_including(options.merge(conflicts: { conflict_file.path => conflict_file }))
hash_including(options.merge(conflicts: entity.conflicts_with_types))
)

subject[:diff_files]

@@ -66,12 +66,13 @@ RSpec.describe DiffsMetadataEntity
end

context 'when there are conflicts' do
let(:conflict_file) { double(path: raw_diff_files.first.new_path, conflict_type: :both_modified) }
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }

before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
allow(entity).to receive(:conflicts_with_types).and_return({
raw_diff_files.first.new_path => {
conflict_type: :both_modified,
conflict_type_when_renamed: :both_modified
}
})
end

it 'serializes diff files with conflicts' do

@@ -79,7 +80,7 @@ RSpec.describe DiffsMetadataEntity
.to receive(:represent)
.with(
raw_diff_files,
hash_including(options.merge(conflicts: { conflict_file.path => conflict_file }))
hash_including(options.merge(conflicts: entity.conflicts_with_types))
)

subject[:diff_files]

@@ -25,6 +25,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
name: 'username',
title: 'Email or username',
placeholder: nil,
label_description: nil,
help: 'Email for Jira Cloud or username for Jira Data Center and Jira Server',
required: false,
choices: nil,

@@ -46,6 +47,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
name: 'password',
title: 'New API token or password',
placeholder: nil,
label_description: nil,
help: 'Leave blank to use your current configuration',
required: true,
choices: nil,

@@ -71,6 +73,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
name: 'send_from_committer_email',
title: 'Send from committer',
placeholder: nil,
label_description: nil,
required: nil,
choices: nil,
value: 'true',

@@ -95,6 +98,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
name: 'branches_to_be_notified',
title: 'Branches for which notifications are to be sent',
placeholder: nil,
label_description: nil,
required: nil,
choices: [
['All branches', 'all'],

@@ -123,6 +127,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
name: 'webhook',
title: nil,
placeholder: nil,
label_description: nil,
help: 'http://mattermost.example.com/hooks/...',
required: true,
choices: nil,

@@ -42,12 +42,13 @@ RSpec.describe PaginatedDiffEntity
end

context 'when there are conflicts' do
let(:conflict_file) { double(path: diff_files.first.new_path, conflict_type: :both_modified) }
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }

before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
allow(entity).to receive(:conflicts_with_types).and_return({
diff_files.first.new_path => {
conflict_type: :both_modified,
conflict_type_when_renamed: :both_modified
}
})
end

it 'serializes diff files with conflicts' do

@@ -55,7 +56,7 @@ RSpec.describe PaginatedDiffEntity
.to receive(:represent)
.with(
diff_files,
hash_including(options.merge(conflicts: { conflict_file.path => conflict_file }))
hash_including(options.merge(conflicts: entity.conflicts_with_types))
)

subject[:diff_files]

@@ -40,6 +40,50 @@ RSpec.describe ExclusiveLeaseGuard, :clean_gitlab_redis_shared_state, feature_ca
expect(subject.exclusive_lease.exists?).to be_falsey
end

describe 'instrumentation', :request_store do
it 'increments the lock requested count and computes the duration of holding the lock' do
subject.call do
sleep 0.1
end

expect(Gitlab::Instrumentation::ExclusiveLock.requested_count).to eq(1)
expect(Gitlab::Instrumentation::ExclusiveLock.hold_duration).to be_between(0.1, 0.11)
end

context 'when exclusive lease is not obtained' do
before do
allow_next_instance_of(Gitlab::ExclusiveLease) do |instance|
allow(instance).to receive(:try_obtain).and_return(false)
end
end

it 'increments the lock requested count and does not compute the duration of holding the lock' do
subject.call do
sleep 0.1
end

expect(Gitlab::Instrumentation::ExclusiveLock.requested_count).to eq(1)
expect(Gitlab::Instrumentation::ExclusiveLock.hold_duration).to eq(0)
end
end

context 'when an exception is raised during the lease' do
subject do
subject_class.new.call do
sleep 0.1
raise StandardError
end
end

it 'increments the lock requested count and computes the duration of holding the lock' do
expect { subject }.to raise_error(StandardError)

expect(Gitlab::Instrumentation::ExclusiveLock.requested_count).to eq(1)
expect(Gitlab::Instrumentation::ExclusiveLock.hold_duration).to be_between(0.1, 0.11)
end
end
end

context 'when the lease is already obtained' do
before do
subject.exclusive_lease.try_obtain

@@ -461,7 +461,6 @@
- './ee/spec/finders/auth/provisioned_users_finder_spec.rb'
- './ee/spec/finders/autocomplete/group_subgroups_finder_spec.rb'
- './ee/spec/finders/autocomplete/project_invited_groups_finder_spec.rb'
- './ee/spec/finders/autocomplete/vulnerabilities_autocomplete_finder_spec.rb'
- './ee/spec/finders/billed_users_finder_spec.rb'
- './ee/spec/finders/boards/boards_finder_spec.rb'
- './ee/spec/finders/boards/epic_boards_finder_spec.rb'

@@ -130,10 +130,35 @@ RSpec.shared_examples 'diff file with conflict_type' do
end

context 'when there is matching conflict file' do
let(:options) { { conflicts: { diff_file.new_path => double(diff_lines_for_serializer: [], conflict_type: :both_modified) } } }
let(:renamed_file?) { false }

it 'returns false' do
expect(subject[:conflict_type]).to eq(:both_modified)
let(:options) do
{
conflicts: {
diff_file.new_path => {
conflict_type: :removed_target_renamed_source,
conflict_type_when_renamed: :renamed_same_file
}
}
}
end

before do
allow(diff_file)
.to receive(:renamed_file?)
.and_return(renamed_file?)
end

it 'returns conflict_type' do
expect(subject[:conflict_type]).to eq(:removed_target_renamed_source)
end

context 'when diff file is renamed' do
let(:renamed_file?) { true }

it 'returns conflict_type' do
expect(subject[:conflict_type]).to eq(:renamed_same_file)
end
end
end
end