Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
parent f91915aadb
commit 29549d052d

@@ -1 +1 @@
-0e78015ff2052203845e049be8b3395bac782554
+bb342a5916dd5ecffb8b281de8290066176c2662

@@ -69,8 +69,13 @@ export default {
  dismissableDescription: s__('BroadcastMessages|Allow users to dismiss the broadcast message'),
  target: s__('BroadcastMessages|Target broadcast message'),
  targetRoles: s__('BroadcastMessages|Target roles'),
+  targetRolesRequired: s__('BroadcastMessages|Select at least one role.'),
+  targetRolesValidationMsg: s__('BroadcastMessages|One or more roles is required.'),
  targetPath: s__('BroadcastMessages|Target Path'),
-  targetPathDescription: s__('BroadcastMessages|Paths can contain wildcards, like */welcome'),
+  targetPathDescription: s__('BroadcastMessages|Paths can contain wildcards, like */welcome.'),
+  targetPathWithRolesReminder: s__(
+    'BroadcastMessages|Leave blank to target all group and project pages.',
+  ),
  startsAt: s__('BroadcastMessages|Starts at'),
  endsAt: s__('BroadcastMessages|Ends at'),
  add: s__('BroadcastMessages|Add broadcast message'),

@@ -110,6 +115,7 @@ export default {
      endsAt: new Date(this.broadcastMessage.endsAt.getTime()),
      renderedMessage: '',
      showInCli: this.broadcastMessage.showInCli,
+      isValidated: false,
    };
  },
  computed: {

@@ -138,6 +144,18 @@ export default {
        this.targetSelected === TARGET_ROLES || this.targetSelected === TARGET_ALL_MATCHING_PATH
      );
    },
+    targetPathDescription() {
+      const defaultDescription = this.$options.i18n.targetPathDescription;
+
+      if (this.showTargetRoles) {
+        return `${defaultDescription} ${this.$options.i18n.targetPathWithRolesReminder}`;
+      }
+
+      return defaultDescription;
+    },
+    targetRolesValid() {
+      return !this.showTargetRoles || this.targetAccessLevels.length > 0;
+    },
    formPayload() {
      return JSON.stringify({
        message: this.message,

@@ -177,6 +195,12 @@ export default {
    },
    async onSubmit() {
      this.loading = true;
+      this.isValidated = true;
+
+      if (!this.targetRolesValid) {
+        this.loading = false;
+        return;
+      }

      const success = await this.submitForm();
      if (success) {

@@ -294,6 +318,9 @@ export default {
    <gl-form-group
      v-show="showTargetRoles"
      :label="$options.i18n.targetRoles"
+      :label-description="$options.i18n.targetRolesRequired"
+      :invalid-feedback="$options.i18n.targetRolesValidationMsg"
+      :state="!isValidated || targetRolesValid"
      data-testid="target-roles-checkboxes"
    >
      <gl-form-checkbox-group v-model="targetAccessLevels" :options="targetAccessLevelOptions" />

@@ -307,7 +334,7 @@ export default {
    >
      <gl-form-input id="target-path-input" v-model="targetPath" />
      <gl-form-text>
-        {{ $options.i18n.targetPathDescription }}
+        {{ targetPathDescription }}
      </gl-form-text>
    </gl-form-group>

@@ -326,7 +353,7 @@ export default {
      :loading="loading"
      :disabled="messageBlank"
      data-testid="submit-button"
-      class="gl-mr-2"
+      class="js-no-auto-disable gl-mr-2"
    >
      {{ isAddForm ? $options.i18n.add : $options.i18n.update }}
    </gl-button>

@@ -4,6 +4,7 @@ import {
  TARGET_TYPE_MERGE_REQUEST,
  EVENT_CLOSED_ICONS,
} from 'ee_else_ce/contribution_events/constants';
+import { getValueByEventTarget } from '../../utils';
import ContributionEventBase from './contribution_event_base.vue';

export default {

@@ -16,20 +17,14 @@ export default {
    },
  },
  computed: {
-    target() {
-      return this.event.target;
-    },
    targetType() {
-      return this.target.type;
-    },
-    issueType() {
-      return this.target.issue_type;
+      return this.event.target.type;
    },
    message() {
-      return EVENT_CLOSED_I18N[this.issueType || this.targetType] || EVENT_CLOSED_I18N.fallback;
+      return getValueByEventTarget(EVENT_CLOSED_I18N, this.event);
    },
    iconName() {
-      return EVENT_CLOSED_ICONS[this.issueType || this.targetType] || EVENT_CLOSED_ICONS.fallback;
+      return getValueByEventTarget(EVENT_CLOSED_ICONS, this.event);
    },
    iconClass() {
      return this.targetType === TARGET_TYPE_MERGE_REQUEST ? 'gl-text-red-500' : 'gl-text-blue-500';

@@ -1,5 +1,10 @@
<script>
-import { EVENT_CREATED_I18N, TARGET_TYPE_DESIGN } from 'ee_else_ce/contribution_events/constants';
+import {
+  EVENT_CREATED_I18N,
+  TARGET_TYPE_DESIGN,
+  TYPE_FALLBACK,
+} from 'ee_else_ce/contribution_events/constants';
+import { getValueByEventTarget } from '../../utils';
import ContributionEventBase from './contribution_event_base.vue';

export default {

@@ -18,15 +23,12 @@ export default {
    resourceParent() {
      return this.event.resource_parent;
    },
-    issueType() {
-      return this.target.issue_type;
-    },
    message() {
      if (!this.target) {
-        return EVENT_CREATED_I18N[this.resourceParent.type] || EVENT_CREATED_I18N.fallback;
+        return EVENT_CREATED_I18N[this.resourceParent.type] || EVENT_CREATED_I18N[TYPE_FALLBACK];
      }

-      return EVENT_CREATED_I18N[this.issueType || this.target.type] || EVENT_CREATED_I18N.fallback;
+      return getValueByEventTarget(EVENT_CREATED_I18N, this.event);
    },
    iconName() {
      switch (this.target?.type) {

@@ -0,0 +1,36 @@
+<script>
+import {
+  EVENT_REOPENED_I18N,
+  EVENT_REOPENED_ICONS,
+} from 'ee_else_ce/contribution_events/constants';
+import { getValueByEventTarget } from '../../utils';
+import ContributionEventBase from './contribution_event_base.vue';
+
+export default {
+  name: 'ContributionEventReopened',
+  components: { ContributionEventBase },
+  props: {
+    event: {
+      type: Object,
+      required: true,
+    },
+  },
+  computed: {
+    message() {
+      return getValueByEventTarget(EVENT_REOPENED_I18N, this.event);
+    },
+    iconName() {
+      return getValueByEventTarget(EVENT_REOPENED_ICONS, this.event);
+    },
+  },
+};
+</script>
+
+<template>
+  <contribution-event-base
+    :event="event"
+    :message="message"
+    :icon-name="iconName"
+    icon-class="gl-text-green-500"
+  />
+</template>

@@ -10,6 +10,7 @@ import {
  EVENT_TYPE_MERGED,
  EVENT_TYPE_CREATED,
  EVENT_TYPE_CLOSED,
+  EVENT_TYPE_REOPENED,
} from '../constants';
import ContributionEventApproved from './contribution_event/contribution_event_approved.vue';
import ContributionEventExpired from './contribution_event/contribution_event_expired.vue';

@@ -20,6 +21,7 @@ import ContributionEventPrivate from './contribution_event/contribution_event_pr
import ContributionEventMerged from './contribution_event/contribution_event_merged.vue';
import ContributionEventCreated from './contribution_event/contribution_event_created.vue';
import ContributionEventClosed from './contribution_event/contribution_event_closed.vue';
+import ContributionEventReopened from './contribution_event/contribution_event_reopened.vue';

export default {
  props: {

@@ -141,6 +143,9 @@ export default {
      case EVENT_TYPE_CLOSED:
        return ContributionEventClosed;

+      case EVENT_TYPE_REOPENED:
+        return ContributionEventReopened;
+
      default:
        return EmptyComponent;
    }

@@ -34,6 +34,8 @@ export const WORK_ITEM_ISSUE_TYPE_ISSUE = 'issue';
export const WORK_ITEM_ISSUE_TYPE_TASK = 'task';
export const WORK_ITEM_ISSUE_TYPE_INCIDENT = 'incident';

+export const TYPE_FALLBACK = 'fallback';
+
export const EVENT_CREATED_I18N = {
  [RESOURCE_PARENT_TYPE_PROJECT]: s__('ContributionEvent|Created project %{resourceParentLink}.'),
  [TARGET_TYPE_MILESTONE]: s__(

@@ -57,7 +59,7 @@ export const EVENT_CREATED_I18N = {
  [WORK_ITEM_ISSUE_TYPE_INCIDENT]: s__(
    'ContributionEvent|Opened incident %{targetLink} in %{resourceParentLink}.',
  ),
-  fallback: s__('ContributionEvent|Created resource.'),
+  [TYPE_FALLBACK]: s__('ContributionEvent|Created resource.'),
};

export const EVENT_CLOSED_I18N = {

@@ -76,11 +78,35 @@ export const EVENT_CLOSED_I18N = {
  [WORK_ITEM_ISSUE_TYPE_INCIDENT]: s__(
    'ContributionEvent|Closed incident %{targetLink} in %{resourceParentLink}.',
  ),
-  fallback: s__('ContributionEvent|Closed resource.'),
+  [TYPE_FALLBACK]: s__('ContributionEvent|Closed resource.'),
};

+export const EVENT_REOPENED_I18N = {
+  [TARGET_TYPE_MILESTONE]: s__(
+    'ContributionEvent|Reopened milestone %{targetLink} in %{resourceParentLink}.',
+  ),
+  [TARGET_TYPE_MERGE_REQUEST]: s__(
+    'ContributionEvent|Reopened merge request %{targetLink} in %{resourceParentLink}.',
+  ),
+  [WORK_ITEM_ISSUE_TYPE_ISSUE]: s__(
+    'ContributionEvent|Reopened issue %{targetLink} in %{resourceParentLink}.',
+  ),
+  [WORK_ITEM_ISSUE_TYPE_TASK]: s__(
+    'ContributionEvent|Reopened task %{targetLink} in %{resourceParentLink}.',
+  ),
+  [WORK_ITEM_ISSUE_TYPE_INCIDENT]: s__(
+    'ContributionEvent|Reopened incident %{targetLink} in %{resourceParentLink}.',
+  ),
+  [TYPE_FALLBACK]: s__('ContributionEvent|Reopened resource.'),
+};
+
export const EVENT_CLOSED_ICONS = {
  [WORK_ITEM_ISSUE_TYPE_ISSUE]: 'issue-closed',
  [TARGET_TYPE_MERGE_REQUEST]: 'merge-request-close',
-  fallback: 'status_closed',
+  [TYPE_FALLBACK]: 'status_closed',
};
+
+export const EVENT_REOPENED_ICONS = {
+  [TARGET_TYPE_MERGE_REQUEST]: 'merge-request-open',
+  [TYPE_FALLBACK]: 'status_open',
+};

@@ -0,0 +1,9 @@
+import { TYPE_FALLBACK } from './constants';
+
+export const getValueByEventTarget = (map, event) => {
+  const {
+    target: { type: targetType, issue_type: issueType },
+  } = event;
+
+  return map[issueType || targetType] || map[TYPE_FALLBACK];
+};

@@ -2,9 +2,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { createAlert } from '~/alert';
import { visitUrl } from '~/lib/utils/url_utility';
-import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import getEnvironment from '../graphql/queries/environment.query.graphql';
-import getEnvironmentWithNamespace from '../graphql/queries/environment_with_namespace.graphql';
import updateEnvironment from '../graphql/mutations/update_environment.mutation.graphql';
import EnvironmentForm from './environment_form.vue';

@@ -13,14 +11,11 @@ export default {
    GlLoadingIcon,
    EnvironmentForm,
  },
-  mixins: [glFeatureFlagsMixin()],
  inject: ['projectEnvironmentsPath', 'projectPath', 'environmentName'],
  apollo: {
    environment: {
      query() {
-        return this.glFeatures?.kubernetesNamespaceForEnvironment
-          ? getEnvironmentWithNamespace
-          : getEnvironment;
+        return getEnvironment;
      },
      variables() {
        return {

@@ -17,7 +17,6 @@ import {
  ENVIRONMENT_EDIT_HELP_TEXT,
} from 'ee_else_ce/environments/constants';
import csrf from '~/lib/utils/csrf';
-import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import getNamespacesQuery from '../graphql/queries/k8s_namespaces.query.graphql';
import getUserAuthorizedAgents from '../graphql/queries/user_authorized_agents.query.graphql';

@@ -33,7 +32,6 @@ export default {
    GlSprintf,
    GlAlert,
  },
-  mixins: [glFeatureFlagsMixin()],
  inject: {
    protectedEnvironmentSettingsPath: { default: '' },
    projectPath: { default: '' },

@@ -173,11 +171,8 @@ export default {
        item.text.toLowerCase().includes(lowerCasedSearchTerm),
      );
    },
-    isKasKubernetesNamespaceAvailable() {
-      return this.glFeatures?.kubernetesNamespaceForEnvironment;
-    },
    showNamespaceSelector() {
-      return Boolean(this.isKasKubernetesNamespaceAvailable && this.selectedAgentId);
+      return Boolean(this.selectedAgentId);
    },
    namespaceDropdownToggleText() {
      return this.selectedNamespace || this.$options.i18n.namespaceHelpText;

@@ -11,10 +11,8 @@ import {
import { __, s__ } from '~/locale';
import { truncate } from '~/lib/utils/text_utility';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import isLastDeployment from '../graphql/queries/is_last_deployment.query.graphql';
import getEnvironmentClusterAgent from '../graphql/queries/environment_cluster_agent.query.graphql';
-import getEnvironmentClusterAgentWithNamespace from '../graphql/queries/environment_cluster_agent_with_namespace.query.graphql';
import ExternalUrl from './environment_external_url.vue';
import Actions from './environment_actions.vue';
import StopComponent from './environment_stop.vue';

@@ -52,7 +50,6 @@ export default {
  directives: {
    GlTooltip,
  },
-  mixins: [glFeatureFlagsMixin()],
  inject: ['helpPagePath', 'projectPath'],
  props: {
    environment: {

@@ -165,9 +162,6 @@ export default {
    rolloutStatus() {
      return this.environment?.rolloutStatus;
    },
-    isKubernetesNamespaceAvailable() {
-      return this.glFeatures?.kubernetesNamespaceForEnvironment;
-    },
  },
  methods: {
    toggleEnvironmentCollapse() {

@@ -185,9 +179,7 @@ export default {
        return { environmentName: this.environment.name, projectFullPath: this.projectPath };
      },
      query() {
-        return this.isKubernetesNamespaceAvailable
-          ? getEnvironmentClusterAgentWithNamespace
-          : getEnvironmentClusterAgent;
+        return getEnvironmentClusterAgent;
      },
      update(data) {
        this.clusterAgent = data?.project?.environment?.clusterAgent;

@@ -5,6 +5,7 @@ query getEnvironment($projectFullPath: ID!, $environmentName: String) {
    id
    name
    externalUrl
+   kubernetesNamespace
    clusterAgent {
      id
      name

@@ -3,6 +3,7 @@ query getEnvironmentClusterAgent($projectFullPath: ID!, $environmentName: String) {
    id
    environment(name: $environmentName) {
      id
+     kubernetesNamespace
      clusterAgent {
        id
        name

@@ -1,20 +0,0 @@
-query getEnvironmentClusterAgentWithNamespace($projectFullPath: ID!, $environmentName: String) {
-  project(fullPath: $projectFullPath) {
-    id
-    environment(name: $environmentName) {
-      id
-      kubernetesNamespace
-      clusterAgent {
-        id
-        name
-        webPath
-        tokens {
-          nodes {
-            id
-            lastUsedAt
-          }
-        }
-      }
-    }
-  }
-}

@@ -1,15 +0,0 @@
-query getEnvironmentWithNamespace($projectFullPath: ID!, $environmentName: String) {
-  project(fullPath: $projectFullPath) {
-    id
-    environment(name: $environmentName) {
-      id
-      name
-      externalUrl
-      kubernetesNamespace
-      clusterAgent {
-        id
-        name
-      }
-    }
-  }
-}

@@ -454,7 +454,7 @@ export default {
    </div>
    <ai-genie
      v-if="explainCodeAvailable"
      container-id="fileHolder"
      container-selector=".file-content"
      :file-path="path"
      class="gl-ml-7"
    />

@@ -1,6 +1,6 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import MrWidgetOptions from 'ee_else_ce/vue_merge_request_widget/mr_widget_options.vue';
+import MrWidgetOptions from 'any_else_ce/vue_merge_request_widget/mr_widget_options.vue';
import createDefaultClient from '~/lib/graphql';
import { parseBoolean } from '~/lib/utils/common_utils';
import Translate from '../vue_shared/translate';

@@ -40,6 +40,10 @@ export default {
    this.track(EVENT_ACTION, { label: EVENT_LABEL_VIEWER, property: this.blob.language });
    addBlobLinksTracking();
  },
+  mounted() {
+    const { hash } = this.$route;
+    this.lineHighlighter.highlightHash(hash);
+  },
  userColorScheme: window.gon.user_color_scheme,
};
</script>

@@ -12,10 +12,6 @@ class Projects::EnvironmentsController < Projects::ApplicationController
    push_frontend_feature_flag(:environment_details_vue, @project)
  end

-  before_action only: [:index, :edit, :new] do
-    push_frontend_feature_flag(:kubernetes_namespace_for_environment)
-  end
-
  before_action :authorize_read_environment!
  before_action :authorize_create_environment!, only: [:new, :create]
  before_action :authorize_stop_environment!, only: [:stop]

@@ -133,7 +133,7 @@ class IssuableFinder

  def projects
    strong_memoize(:projects) do
-      next [project] if project?
+      next Array.wrap(project) if project?

      projects =
        if current_user && params[:authorized_only].presence && !current_user_related?

@@ -3,6 +3,7 @@
module Ci
  class JobAnnotation < Ci::ApplicationRecord
    include Ci::Partitionable
+    include BulkInsertSafe

    self.table_name = :p_ci_job_annotations
    self.primary_key = :id

@@ -13,7 +14,6 @@ module Ci

    validates :data, json_schema: { filename: 'ci_job_annotation_data' }
    validates :name, presence: true,
-      length: { maximum: 255 },
-      uniqueness: { scope: [:job_id, :partition_id] }
+      length: { maximum: 255 }
  end
end

@@ -60,7 +60,8 @@ module Ci
      requirements_v2: 'requirements_v2.json',
      coverage_fuzzing: 'gl-coverage-fuzzing.json',
      api_fuzzing: 'gl-api-fuzzing-report.json',
-      cyclonedx: 'gl-sbom.cdx.json'
+      cyclonedx: 'gl-sbom.cdx.json',
+      annotations: 'gl-annotations.json'
    }.freeze

    INTERNAL_TYPES = {

@@ -79,6 +80,7 @@ module Ci
      cluster_applications: :gzip, # DEPRECATED: https://gitlab.com/gitlab-org/gitlab/-/issues/361094
      lsif: :zip,
      cyclonedx: :gzip,
+      annotations: :gzip,

      # Security reports and license scanning reports are raw artifacts
      # because they used to be fetched by the frontend, but this is not the case anymore.

@@ -221,7 +223,8 @@ module Ci
      api_fuzzing: 26, ## EE-specific
      cluster_image_scanning: 27, ## EE-specific
      cyclonedx: 28, ## EE-specific
-      requirements_v2: 29 ## EE-specific
+      requirements_v2: 29, ## EE-specific
+      annotations: 30
    }

    # `file_location` indicates where actual files are stored.

@@ -12,7 +12,13 @@ class PoolRepository < ApplicationRecord

  has_many :member_projects, class_name: 'Project'

-  after_create :correct_disk_path
+  after_create :set_disk_path

+  scope :by_source_project, ->(project) { where(source_project: project) }
+  scope :by_source_project_and_shard_name, ->(project, shard_name) do
+    by_source_project(project)
+      .for_repository_storage(shard_name)
+  end
+
  state_machine :state, initial: :none do
    state :scheduled

@@ -107,8 +113,8 @@ class PoolRepository < ApplicationRecord

  private

-  def correct_disk_path
-    update!(disk_path: storage.disk_path)
+  def set_disk_path
+    update!(disk_path: storage.disk_path) if disk_path.blank?
  end

  def storage

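The new `by_source_project_and_shard_name` scope is what a moved project uses to find the pool repository that already lives on its destination shard (see `Project#swap_pool_repository!` below). A minimal usage sketch, assuming a `project` in scope; the `'storage-2'` shard name is illustrative, not from the diff:

```ruby
# Hedged sketch: look up this source project's pool on the destination shard.
# `take!` raises ActiveRecord::RecordNotFound if no such pool exists yet.
pool = PoolRepository.by_source_project_and_shard_name(project, 'storage-2').take!
pool.shard.name # => "storage-2"
```
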
@@ -1953,6 +1953,8 @@ class Project < ApplicationRecord
  def track_project_repository
    repository = project_repository || build_project_repository
    repository.update!(shard_name: repository_storage, disk_path: disk_path)
+
+    cleanup if replicate_object_pool_on_move_ff_enabled?
  end

  def create_repository(force: false, default_branch: nil)

@@ -2827,8 +2829,26 @@ class Project < ApplicationRecord
    update_column(:pool_repository_id, nil)
  end

+  # After repository is moved from shard to shard, disconnect it from the previous object pool and connect to the new pool
+  def swap_pool_repository!
+    return unless replicate_object_pool_on_move_ff_enabled?
+    return unless repository_exists?
+
+    old_pool_repository = pool_repository
+    return if old_pool_repository.blank?
+    return if pool_repository_shard_matches_repository?(old_pool_repository)
+
+    new_pool_repository = PoolRepository.by_source_project_and_shard_name(old_pool_repository.source_project, repository_storage).take!
+    update!(pool_repository: new_pool_repository)
+
+    old_pool_repository.unlink_repository(repository, disconnect: !pending_delete?)
+  end
+
  def link_pool_repository
-    pool_repository&.link_repository(repository)
+    return unless pool_repository
+    return if (pool_repository.shard_name != repository.shard) && replicate_object_pool_on_move_ff_enabled?
+
+    pool_repository.link_repository(repository)
  end

  def has_pool_repository?

@@ -3507,6 +3527,16 @@ class Project < ApplicationRecord
  def runners_token_prefix
    RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX
  end
+
+  def replicate_object_pool_on_move_ff_enabled?
+    Feature.enabled?(:replicate_object_pool_on_move, self)
+  end
+
+  def pool_repository_shard_matches_repository?(pool)
+    pool_repository_shard = pool.shard.name
+
+    pool_repository_shard == repository_storage
+  end
end

Project.prepend_mod_with('Project')

@@ -138,6 +138,7 @@ module Ci
      def parse_artifact(artifact)
        case artifact.file_type
        when 'dotenv' then parse_dotenv_artifact(artifact)
+        when 'annotations' then parse_annotations_artifact(artifact)
        else success
        end
      end

@@ -188,6 +189,10 @@ module Ci
      def parse_dotenv_artifact(artifact)
        Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
      end
+
+      def parse_annotations_artifact(artifact)
+        Ci::ParseAnnotationsArtifactService.new(project, current_user).execute(artifact)
+      end
    end
  end
end

@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Ci
+  class ParseAnnotationsArtifactService < ::BaseService
+    include ::Gitlab::Utils::StrongMemoize
+    include ::Gitlab::EncodingHelper
+
+    SizeLimitError = Class.new(StandardError)
+    ParserError = Class.new(StandardError)
+
+    def execute(artifact)
+      return error('Artifact is not annotations file type', :bad_request) unless artifact&.annotations?
+
+      return error("Annotations Artifact Too Big. Maximum Allowable Size: #{annotations_size_limit}", :bad_request) if
+        artifact.file.size > annotations_size_limit
+
+      annotations = parse!(artifact)
+      Ci::JobAnnotation.bulk_upsert!(annotations, unique_by: %i[partition_id job_id name])
+
+      success
+    rescue SizeLimitError, ParserError, Gitlab::Json.parser_error, ActiveRecord::RecordInvalid => error
+      error(error.message, :bad_request)
+    end
+
+    private
+
+    def parse!(artifact)
+      annotations = []
+
+      artifact.each_blob do |blob|
+        # Windows powershell may output UTF-16LE files, so convert the whole file
+        # to UTF-8 before proceeding.
+        blob = strip_bom(encode_utf8_with_replacement_character(blob))
+
+        blob_json = Gitlab::Json.parse(blob)
+        raise ParserError, 'Annotations files must be a JSON object' unless blob_json.is_a?(Hash)
+
+        blob_json.each do |key, value|
+          annotations.push(Ci::JobAnnotation.new(job: artifact.job, name: key, data: value))
+
+          if annotations.size > annotations_num_limit
+            raise SizeLimitError,
+              "Annotations files cannot have more than #{annotations_num_limit} annotation lists"
+          end
+        end
+      end
+
+      annotations
+    end
+
+    def annotations_num_limit
+      project.actual_limits.ci_job_annotations_num
+    end
+    strong_memoize_attr :annotations_num_limit
+
+    def annotations_size_limit
+      project.actual_limits.ci_job_annotations_size
+    end
+    strong_memoize_attr :annotations_size_limit
+  end
+end

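Reading `parse!` above backwards: the artifact must decode to a single JSON object, each top-level key names one annotation list, and each key/value pair becomes one `Ci::JobAnnotation` row upserted on `[partition_id, job_id, name]`. A hedged sketch of that mapping — the `external_link` entry shape is an assumption (the diff only constrains the top level to a JSON object validated by the `ci_job_annotation_data` schema, which is not shown here):

```ruby
require 'json'

# Hypothetical gl-annotations.json blob. Only the top-level object shape is
# guaranteed by the parser above; the list-entry shape is assumed.
blob = <<~ANNOTATIONS
  {
    "external_links": [
      { "external_link": { "label": "Job dashboard", "url": "https://example.com/dashboard" } }
    ]
  }
ANNOTATIONS

# Mirrors parse!: one Ci::JobAnnotation per top-level key.
JSON.parse(blob).each do |name, data|
  puts "Ci::JobAnnotation(name: #{name.inspect}, data: #{data.inspect})"
end
```
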
@@ -24,7 +24,13 @@ module UpdateRepositoryStorageMethods

    return response if response

-    mirror_repositories unless same_filesystem?
+    unless same_filesystem?
+      mirror_repositories
+
+      repository_storage_move.transaction do
+        mirror_object_pool(destination_storage_name)
+      end
+    end

    repository_storage_move.transaction do
      repository_storage_move.finish_replication!

@@ -53,6 +59,11 @@ module UpdateRepositoryStorageMethods
    raise NotImplementedError
  end

+  def mirror_object_pool(_destination_shard)
+    # no-op, redefined for Projects::UpdateRepositoryStorageService
+    nil
+  end
+
  def mirror_repository(type:)
    unless wait_for_pushes(type)
      raise Error, s_('UpdateRepositoryStorage|Timeout waiting for %{type} repository pushes') % { type: type.name }

@@ -9,12 +9,20 @@ module Projects
    private

    def track_repository(_destination_storage_name)
-      project.leave_pool_repository
+      # Connect project to pool repository from the new shard
+      project.swap_pool_repository!
+
+      # Connect project to the repository from the new shard
      project.track_project_repository
+
+      # Link repository from the new shard to pool repository from the new shard
+      project.link_pool_repository if replicate_object_pool_on_move_ff_enabled?
    end

    def mirror_repositories
-      mirror_repository(type: Gitlab::GlRepository::PROJECT) if project.repository_exists?
+      if project.repository_exists?
+        mirror_repository(type: Gitlab::GlRepository::PROJECT)
+      end

      if project.wiki.repository_exists?
        mirror_repository(type: Gitlab::GlRepository::WIKI)

@@ -25,6 +33,30 @@ module Projects
      end
    end

+    def mirror_object_pool(destination_storage_name)
+      return unless replicate_object_pool_on_move_ff_enabled?
+      return unless project.repository_exists?
+
+      pool_repository = project.pool_repository
+      return unless pool_repository
+
+      # If pool repository already exists, then we will link the moved project repository to it
+      return if pool_repository_exists_for?(shard_name: destination_storage_name, pool_repository: pool_repository)
+
+      target_pool_repository = create_pool_repository_for!(
+        shard_name: destination_storage_name,
+        pool_repository: pool_repository
+      )
+
+      checksum, new_checksum = replicate_object_pool_repository(from: pool_repository, to: target_pool_repository)
+
+      if checksum != new_checksum
+        raise Error,
+          format(s_('UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}'),
+            type: 'object_pool', old: checksum, new: new_checksum)
+      end
+    end
+
    def remove_old_paths
      super

@@ -46,5 +78,39 @@ module Projects
      ).remove
    end
  end
+
+  def pool_repository_exists_for?(shard_name:, pool_repository:)
+    PoolRepository.by_source_project_and_shard_name(
+      pool_repository.source_project,
+      shard_name
+    ).exists?
+  end
+
+  def create_pool_repository_for!(shard_name:, pool_repository:)
+    # Set state `ready` because we manually replicate object pool
+    PoolRepository.create!(
+      shard: Shard.by_name(shard_name),
+      source_project: pool_repository.source_project,
+      disk_path: pool_repository.disk_path,
+      state: 'ready'
+    )
+  end
+
+  def replicate_object_pool_repository(from:, to:)
+    old_object_pool = from.object_pool
+    new_object_pool = to.object_pool
+
+    checksum = old_object_pool.repository.checksum
+
+    new_object_pool.repository.replicate(old_object_pool.repository)
+
+    new_checksum = new_object_pool.repository.checksum
+
+    [checksum, new_checksum]
+  end
+
+  def replicate_object_pool_on_move_ff_enabled?
+    Feature.enabled?(:replicate_object_pool_on_move, project)
+  end
end
end

@@ -0,0 +1,25 @@
+---
+description: A snippet has been edited from the Web IDE
+category: InternalEventTracking
+action: g_edit_by_snippet_ide
+label_description:
+property_description:
+value_description:
+extra_properties:
+identifiers:
+- project
+- user
+- namespace
+product_section: dev
+product_stage: create
+product_group: source_code
+milestone: "16.3"
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128592
+distributions:
+- ce
+- ee
+tiers:
+- free
+- premium
+- ultimate

@@ -0,0 +1,25 @@
+---
+description: A file has been edited from the Web IDE
+category: InternalEventTracking
+action: g_edit_by_web_ide
+label_description:
+property_description:
+value_description:
+extra_properties:
+identifiers:
+- project
+- user
+- namespace
+product_section: dev
+product_stage: create
+product_group: source_code
+milestone: "16.3"
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128592
+distributions:
+- ce
+- ee
+tiers:
+- free
+- premium
+- ultimate

@@ -0,0 +1,25 @@
+---
+description: A file has been edited from the single file editor
+category: InternalEventTracking
+action: g_edit_by_sfe
+label_description:
+property_description:
+value_description:
+extra_properties:
+identifiers:
+- project
+- user
+- namespace
+product_section: dev
+product_stage: create
+product_group: source_code
+milestone: "16.3"
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128592
+distributions:
+- ce
+- ee
+tiers:
+- free
+- premium
+- ultimate

@@ -0,0 +1,8 @@
+---
+name: replicate_object_pool_on_move
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127143
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/420720
+milestone: '16.3'
+type: development
+group: group::source code
+default_enabled: false

@@ -1,8 +1,8 @@
---
-name: kubernetes_namespace_for_environment
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/125191
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/417129
-milestone: '16.2'
-type: development
-group: group::environments
+name: emit_sidekiq_histogram_metrics
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128706
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/421499
+milestone: '16.3'
+type: ops
+group: group::scalability
default_enabled: true

@@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric
options:
  events:
    - g_edit_by_web_ide
+events:
+  - name: g_edit_by_web_ide
+    unique: user.id
distribution:
- ce
- ee

@@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric
options:
  events:
    - g_edit_by_sfe
+events:
+  - name: g_edit_by_sfe
+    unique: user.id
distribution:
- ce
- ee

@@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric
options:
  events:
    - g_edit_by_snippet_ide
+events:
+  - name: g_edit_by_snippet_ide
+    unique: user.id
distribution:
- ce
- ee

@@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric
options:
  events:
    - g_edit_by_web_ide
+events:
+  - name: g_edit_by_web_ide
+    unique: user.id
distribution:
- ce
- ee

@@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric
options:
  events:
    - g_edit_by_sfe
+events:
+  - name: g_edit_by_sfe
+    unique: user.id
distribution:
- ce
- ee

@@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric
options:
  events:
    - g_edit_by_snippet_ide
+events:
+  - name: g_edit_by_snippet_ide
+    unique: user.id
distribution:
- ce
- ee

@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddCiJobAnnotationsPlanLimits < Gitlab::Database::Migration[2.1]
+  def change
+    add_column :plan_limits, :ci_max_artifact_size_annotations, :integer, null: false, default: 0
+    add_column :plan_limits, :ci_job_annotations_size, :integer, null: false, default: 81920
+    add_column :plan_limits, :ci_job_annotations_num, :integer, null: false, default: 20
+  end
+end

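These are the same limits the parsing service reads through `project.actual_limits`, so the migration defaults are the effective caps until a plan overrides them. A hedged console sketch, assuming a `project` in scope:

```ruby
# Defaults come from the migration above unless the plan overrides them.
limits = project.actual_limits
limits.ci_job_annotations_num   # => 20    (max annotation lists per job)
limits.ci_job_annotations_size  # => 81920 (max artifact size in bytes)
```
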
@@ -0,0 +1 @@
+2e2c9416a8c60fc7273f732b2890ee84dcd864ff68a269a2ca1603e6bfaf4c31

@@ -20444,7 +20444,10 @@ CREATE TABLE plan_limits (
    google_cloud_logging_configurations integer DEFAULT 5 NOT NULL,
    ml_model_max_file_size bigint DEFAULT '10737418240'::bigint NOT NULL,
    limits_history jsonb DEFAULT '{}'::jsonb NOT NULL,
-    updated_at timestamp with time zone DEFAULT now() NOT NULL
+    updated_at timestamp with time zone DEFAULT now() NOT NULL,
+    ci_max_artifact_size_annotations integer DEFAULT 0 NOT NULL,
+    ci_job_annotations_size integer DEFAULT 81920 NOT NULL,
+    ci_job_annotations_num integer DEFAULT 20 NOT NULL
);

CREATE SEQUENCE plan_limits_id_seq

@@ -26829,6 +26829,7 @@ Iteration ID wildcard values.
| Value | Description |
| ----- | ----------- |
| <a id="jobartifactfiletypeaccessibility"></a>`ACCESSIBILITY` | ACCESSIBILITY job artifact file type. |
+| <a id="jobartifactfiletypeannotations"></a>`ANNOTATIONS` | ANNOTATIONS job artifact file type. |
| <a id="jobartifactfiletypeapi_fuzzing"></a>`API_FUZZING` | API FUZZING job artifact file type. |
| <a id="jobartifactfiletypearchive"></a>`ARCHIVE` | ARCHIVE job artifact file type. |
| <a id="jobartifactfiletypebrowser_performance"></a>`BROWSER_PERFORMANCE` | BROWSER PERFORMANCE job artifact file type. |

@@ -21,7 +21,7 @@ For Flux users, the synchronization status of a given environment is not displayed
## Configure a dashboard

> - Filtering resources by namespace [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/403618) in GitLab 16.2 [with a flag](../../administration/feature_flags.md) named `kubernetes_namespace_for_environment`. Disabled by default.
-> - Feature flag `kubernetes_namespace_for_environment` [enabled by default](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127043) in GitLab 16.3.
+> - Filtering resources by namespace [enabled by default](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127043) in GitLab 16.3. Feature flag `kubernetes_namespace_for_environment` removed.

Configure a dashboard to use it for a given environment.
You can configure dashboard for an environment that already exists, or

@@ -82,7 +82,7 @@ You can also review the stack trace.

### Supported language SDKs & Sentry types

-In the following table, you can see a list of all event types available through Sentry SDK, and whether they are supported by GitLab Error Tracking.
+The following table lists all event types available through Sentry SDK, and whether they are supported by GitLab Error Tracking.

| Language | Tested SDK client and version | Endpoint | Supported item types |
| -------- | ------------------------------- | ---------- | --------------------------------- |

@@ -201,9 +201,7 @@ to your runner's `config.toml` configuration file, as referenced in

If you're asked for the project type while setting up Sentry, select **Go**.

-If you see the following error in your GitLab Runner logs, then you should
-specify the deprecated
-DSN in **Sentry.io > Project Settings > Client Keys (DSN) > Show deprecated DSN**.
+To rectify the following error, specify the deprecated DSN in **Sentry.io > Project Settings > Client Keys (DSN) > Show deprecated DSN**.

```plaintext
ERROR: Sentry failure builds=0 error=raven: dsn missing private key

@ -64,7 +64,7 @@ and, for autopilot clusters, to add configurations that specify which jobs to ru
|
|||
1. Verify that you are connected to the cluster:
|
||||
|
||||
```shell
|
||||
kubectl config view current-context
|
||||
kubectl config current-context
|
||||
```
|
||||
|
||||
## Install and configure the Kubernetes Operator
|
||||
|
|
@ -74,7 +74,7 @@ Now that you have a cluster, you're ready to install and configure the Kubernete
|
|||
1. Install the prerequisites:
|
||||
|
||||
```shell
|
||||
kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.7.1/cert-manager.yaml
|
||||
kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.7.1/cert-manager.yaml
|
||||
```
|
||||
|
||||
1. Install the Operator Lifecycle Manager (OLM), a tool that manages the Kubernetes Operators that
|
||||
|
|
|
|||
|
|
@@ -233,11 +233,10 @@ For members with `Minimal Access` in the selected group, their `Max Role` and `S

## User cap for groups

-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/330027) in GitLab 14.7.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/330027) in GitLab 14.7 [with a flag](../../administration/feature_flags.md) named `saas_user_caps`. Disabled by default.
+> - [Enabled on GitLab.com](https://gitlab.com/groups/gitlab-org/-/epics/9263) in GitLab 16.3.

FLAG:
On self-managed GitLab, this feature is not available. On GitLab.com, this feature is available for some groups.
This feature is not ready for production use.
For more information about user caps for GitLab self-managed, see [User cap](../../administration/settings/sign_up_restrictions.md#user-cap).

When the number of billable members reaches the user cap, new users can't be added to the group
without being approved by the group owner.

@@ -301,6 +300,16 @@ To approve members that are pending because they've exceeded the user cap:
1. On the **Seats** tab, under the alert, select **View pending approvals**.
1. For each member you want to approve, select **Approve**.

+### Known issues
+
+The user cap cannot be enabled if a group, subgroup, or project is shared externally. If a group, subgroup,
+or project is shared externally, it is shared outside of the namespace hierarchy, regardless of its level
+in the hierarchy.
+
+To ensure that the user cap applies when groups, subgroups, or projects are shared externally, restrict group sharing only within the top-level namespace. This ensures that groups in the same top-level namespace can be invited, and prevents the addition of new users (seats) when the group is shared.
+
+User cap doesn't consider whether users are billable or not (for example, Free Guest Users in Ultimate). In other words, if you set a cap of 500, user caps block new sign-ups after 500 users, regardless of whether those are all consuming paid seats or not.
+
## Group file templates **(PREMIUM)**

Use group file templates to share a set of templates for common file

@@ -151,7 +151,7 @@ To integrate Microsoft Azure AD, you:
1. Select **Microsoft Graph > Application permissions**.
1. Select the checkboxes **GroupMember.Read.All** and **User.Read.All**.
1. Select **Add permissions** to save.
-1. Select **Grant admin consent for <application name>**, then on the confirmation dialog select **Yes**. The **Status** column for both permissions should change to a green check with **Granted for <application name>**.
+1. Select **Grant admin consent for `<application_name>`**, then on the confirmation dialog select **Yes**. The **Status** column for both permissions should change to a green check with **Granted for `<application_name>`**.

<!-- vale gitlab.SentenceSpacing = YES -->

@@ -14,11 +14,12 @@ module Gitlab
  def find(timeout: nil)
    if ignore_alternate_directories?
      blobs = repository.list_all_blobs(bytes_limit: 0, dynamic_timeout: timeout,
-        ignore_alternate_object_directories: true)
+        ignore_alternate_object_directories: true).to_a

-      blobs.select do |blob|
+      blobs.select! do |blob|
        ::Gitlab::Utils.bytes_to_megabytes(blob.size) > file_size_limit_megabytes
      end
+      filter_existing(blobs)
    else
      any_oversize_blobs.find(timeout: timeout)
    end

@@ -28,6 +29,15 @@ module Gitlab

  attr_reader :project, :repository, :changes, :file_size_limit_megabytes

+  def filter_existing(blobs)
+    gitaly_repo = repository.gitaly_repository.dup.tap { |repo| repo.git_object_directory = "" }
+
+    map_blob_id_to_existence = repository.gitaly_commit_client.object_existence_map(blobs.map(&:id),
+      gitaly_repo: gitaly_repo)
+
+    blobs.reject { |blob| map_blob_id_to_existence[blob.id].present? }
+  end
+
  def ignore_alternate_directories?
    git_env = ::Gitlab::Git::HookEnv.all(repository.gl_repository)

@@ -17,7 +17,7 @@ module Gitlab
      dast performance browser_performance load_performance license_scanning metrics lsif
      dotenv terraform accessibility
      coverage_fuzzing api_fuzzing cluster_image_scanning
-      requirements requirements_v2 coverage_report cyclonedx].freeze
+      requirements requirements_v2 coverage_report cyclonedx annotations].freeze

    attributes ALLOWED_KEYS

@@ -50,6 +50,7 @@ module Gitlab
    validates :requirements, array_of_strings_or_string: true
    validates :requirements_v2, array_of_strings_or_string: true
    validates :cyclonedx, array_of_strings_or_string: true
+    validates :annotations, array_of_strings_or_string: true
  end
end

@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Metrics
-    module Dashboard
-      module Transformers
-        module Yml
-          module V1
-            # Takes a JSON schema validated dashboard hash and
-            # maps it to PrometheusMetric model attributes
-            class PrometheusMetrics
-              def initialize(dashboard_hash, project: nil, dashboard_path: nil)
-                @dashboard_hash = dashboard_hash.with_indifferent_access
-                @project = project
-                @dashboard_path = dashboard_path
-
-                @dashboard_hash.default_proc = -> (h, k) { raise Transformers::Errors::MissingAttribute, k.to_s }
-              end
-
-              def execute
-                prometheus_metrics = []
-
-                dashboard_hash[:panel_groups].each do |panel_group|
-                  panel_group[:panels].each do |panel|
-                    panel[:metrics].each do |metric|
-                      prometheus_metrics << {
-                        project: project,
-                        title: panel[:title],
-                        y_label: panel[:y_label],
-                        query: metric[:query_range] || metric[:query],
-                        unit: metric[:unit],
-                        legend: metric[:label],
-                        identifier: metric[:id],
-                        group: Enums::PrometheusMetric.groups[:custom],
-                        common: false,
-                        dashboard_path: dashboard_path
-                      }.compact
-                    end
-                  end
-                end
-
-                prometheus_metrics
-              end
-
-              private
-
-              attr_reader :dashboard_hash, :project, :dashboard_path
-            end
-          end
-        end
-      end
-    end
-  end
-end

@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Metrics
-    module Dashboard
-      module Validator
-        DASHBOARD_SCHEMA_PATH = Rails.root.join(*%w[lib gitlab metrics dashboard validator schemas dashboard.json]).freeze
-
-        class << self
-          def validate(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil)
-            errors(content, schema_path, dashboard_path: dashboard_path, project: project).empty?
-          end
-
-          def validate!(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil)
-            errors = errors(content, schema_path, dashboard_path: dashboard_path, project: project)
-            errors.empty? || raise(errors.first)
-          end
-
-          private
-
-          def errors(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil)
-            Validator::Client
-              .new(content, schema_path, dashboard_path: dashboard_path, project: project)
-              .execute
-          end
-        end
-      end
-    end
-  end
-end

@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Metrics
-    module Dashboard
-      module Validator
-        class Client
-          # @param content [Hash] Representing a raw, unprocessed
-          #   dashboard object
-          # @param schema_path [String] Representing path to dashboard schema file
-          # @param dashboard_path[String] Representing path to dashboard content file
-          # @param project [Project] Project to validate dashboard against
-          def initialize(content, schema_path, dashboard_path: nil, project: nil)
-            @content = content
-            @schema_path = schema_path
-            @dashboard_path = dashboard_path
-            @project = project
-          end
-
-          def execute
-            errors = validate_against_schema
-            errors += post_schema_validator.validate
-
-            errors.compact
-          end
-
-          private
-
-          attr_reader :content, :schema_path, :project, :dashboard_path
-
-          def custom_formats
-            @custom_formats ||= CustomFormats.new
-          end
-
-          def post_schema_validator
-            PostSchemaValidator.new(
-              project: project,
-              metric_ids: custom_formats.metric_ids_cache,
-              dashboard_path: dashboard_path
-            )
-          end
-
-          def schemer
-            @schemer ||= ::JSONSchemer.schema(Pathname.new(schema_path), formats: custom_formats.format_handlers)
-          end
-
-          def validate_against_schema
-            schemer.validate(content).map do |error|
-              ::Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError.new(error)
-            end
-          end
-        end
-      end
-    end
-  end
-end

@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Metrics
-    module Dashboard
-      module Validator
-        class CustomFormats
-          def format_handlers
-            # Key is custom JSON Schema format name. Value is a proc that takes data and schema and handles
-            # validations.
-            @format_handlers ||= {
-              "add_to_metric_id_cache" => ->(data, schema) { metric_ids_cache << data }
-            }
-          end
-
-          def metric_ids_cache
-            @metric_ids_cache ||= []
-          end
-        end
-      end
-    end
-  end
-end

@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Metrics
-    module Dashboard
-      module Validator
-        module Errors
-          InvalidDashboardError = Class.new(StandardError)
-
-          class SchemaValidationError < InvalidDashboardError
-            def initialize(error = {})
-              super(error_message(error))
-            end
-
-            private
-
-            def error_message(error)
-              if error.is_a?(Hash) && error.present?
-                pretty(error)
-              else
-                "Dashboard failed schema validation"
-              end
-            end
-
-            # based on https://github.com/davishmcclurg/json_schemer/blob/master/lib/json_schemer/errors.rb
-            # with addition ability to translate error messages
-            def pretty(error)
-              data, data_pointer, type, schema = error.values_at('data', 'data_pointer', 'type', 'schema')
-              location = data_pointer.empty? ? 'root' : data_pointer
-
-              case type
-              when 'required'
-                keys = error.fetch('details').fetch('missing_keys').join(', ')
-                _("%{location} is missing required keys: %{keys}") % { location: location, keys: keys }
-              when 'null', 'string', 'boolean', 'integer', 'number', 'array', 'object'
-                _("'%{data}' at %{location} is not of type: %{type}") % { data: data, location: location, type: type }
-              when 'pattern'
-                _("'%{data}' at %{location} does not match pattern: %{pattern}") % { data: data, location: location, pattern: schema.fetch('pattern') }
-              when 'format'
-                _("'%{data}' at %{location} does not match format: %{format}") % { data: data, location: location, format: schema.fetch('format') }
-              when 'const'
-                _("'%{data}' at %{location} is not: %{const}") % { data: data, location: location, const: schema.fetch('const').inspect }
-              when 'enum'
-                _("'%{data}' at %{location} is not one of: %{enum}") % { data: data, location: location, enum: schema.fetch('enum') }
-              else
-                _("'%{data}' at %{location} is invalid: error_type=%{type}") % { data: data, location: location, type: type }
-              end
-            end
-          end
-
-          class DuplicateMetricIds < InvalidDashboardError
-            def initialize
-              super(_("metric_id must be unique across a project"))
-            end
-          end
-        end
-      end
-    end
-  end
-end

@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Metrics
-    module Dashboard
-      module Validator
-        class PostSchemaValidator
-          def initialize(metric_ids:, project: nil, dashboard_path: nil)
-            @metric_ids = metric_ids
-            @project = project
-            @dashboard_path = dashboard_path
-          end
-
-          def validate
-            errors = []
-            errors << uniq_metric_ids
-            errors.compact
-          end
-
-          private
-
-          attr_reader :project, :metric_ids, :dashboard_path
-
-          def uniq_metric_ids
-            return Validator::Errors::DuplicateMetricIds.new if metric_ids.uniq!
-
-            uniq_metric_ids_across_project if project.present? || dashboard_path.present?
-          end
-
-          # rubocop: disable CodeReuse/ActiveRecord
-          def uniq_metric_ids_across_project
-            return ArgumentError.new(_('Both project and dashboard_path are required')) unless
-              dashboard_path.present? && project.present?
-
-            # If PrometheusMetric identifier is not unique across project and dashboard_path,
-            # we need to error because we don't know if the user is trying to create a new metric
-            # or update an existing one.
-            identifier_on_other_dashboard = PrometheusMetric.where(
-              project: project,
-              identifier: metric_ids
-            ).where.not(
-              dashboard_path: dashboard_path
-            ).exists?
-
-            Validator::Errors::DuplicateMetricIds.new if identifier_on_other_dashboard
-          end
-          # rubocop: enable CodeReuse/ActiveRecord
-        end
-      end
-    end
-  end
-end

@@ -1,14 +0,0 @@
-{
-  "type": "object",
-  "properties": {
-    "name": { "type": "string" },
-    "format": {
-      "type": "string",
-      "default": "engineering"
-    },
-    "precision": {
-      "type": "number",
-      "default": 2
-    }
-  }
-}

@@ -1,18 +0,0 @@
-{
-  "type": "object",
-  "required": ["dashboard", "panel_groups"],
-  "properties": {
-    "dashboard": { "type": "string" },
-    "panel_groups": {
-      "type": "array",
-      "items": { "$ref": "./panel_group.json" }
-    },
-    "templating": {
-      "$ref": "./templating.json"
-    },
-    "links": {
-      "type": "array",
-      "items": { "$ref": "./link.json" }
-    }
-  }
-}

@@ -1,12 +0,0 @@
-{
-  "type": "object",
-  "required": ["url"],
-  "properties": {
-    "url": { "type": "string" },
-    "title": { "type": "string" },
-    "type": {
-      "type": "string",
-      "enum": ["grafana"]
-    }
-  }
-}

@@ -1,16 +0,0 @@
-{
-  "type": "object",
-  "required": ["unit"],
-  "oneOf": [{ "required": ["query"] }, { "required": ["query_range"] }],
-  "properties": {
-    "id": {
-      "type": "string",
-      "format": "add_to_metric_id_cache"
-    },
-    "unit": { "type": "string" },
-    "label": { "type": "string" },
-    "query": { "type": ["string", "number"] },
-    "query_range": { "type": ["string", "number"] },
-    "step": { "type": "number" }
-  }
-}

@@ -1,24 +0,0 @@
-{
-  "type": "object",
-  "required": ["title", "metrics"],
-  "properties": {
-    "type": {
-      "type": "string",
-      "enum": ["area-chart", "line-chart", "anomaly-chart", "bar", "column", "stacked-column", "single-stat", "heatmap", "gauge"],
-      "default": "area-chart"
-    },
-    "title": { "type": "string" },
-    "y_label": { "type": "string" },
-    "y_axis": { "$ref": "./axis.json" },
-    "max_value": { "type": "number" },
-    "weight": { "type": "number" },
-    "metrics": {
-      "type": "array",
-      "items": { "$ref": "./metric.json" }
-    },
-    "links": {
-      "type": "array",
-      "items": { "$ref": "./link.json" }
-    }
-  }
-}

@@ -1,12 +0,0 @@
-{
-  "type": "object",
-  "required": ["group", "panels"],
-  "properties": {
-    "group": { "type": "string" },
-    "priority": { "type": "number" },
-    "panels": {
-      "type": "array",
-      "items": { "$ref": "./panel.json" }
-    }
-  }
-}

@@ -1,7 +0,0 @@
-{
-  "type": "object",
-  "required": ["variables"],
-  "properties": {
-    "variables": { "type": "object" }
-  }
-}

@ -11,9 +11,9 @@ module Gitlab
|
|||
# most of the durations for cpu, gitaly, db and elasticsearch
|
||||
SIDEKIQ_LATENCY_BUCKETS = [0.1, 0.5, 1, 2.5].freeze
|
||||
|
||||
# These are the buckets we currently use for alerting, we will likely
|
||||
# replace these histograms with Application SLIs
|
||||
# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1313
|
||||
# These buckets are only available on self-managed.
|
||||
# We have replaced with Application SLIs on GitLab.com.
|
||||
# https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/700
|
||||
SIDEKIQ_JOB_DURATION_BUCKETS = [10, 300].freeze
|
||||
SIDEKIQ_QUEUE_DURATION_BUCKETS = [10, 60].freeze
|
||||
|
||||
|
|
@@ -24,15 +24,12 @@ module Gitlab
include ::Gitlab::SidekiqMiddleware::MetricsHelper

def metrics
{
metrics = {
sidekiq_jobs_cpu_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_cpu_seconds, 'Seconds this Sidekiq job spent on the CPU', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_completion_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_completion_seconds, 'Seconds to complete Sidekiq job', {}, SIDEKIQ_JOB_DURATION_BUCKETS),
sidekiq_jobs_db_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_db_seconds, 'Seconds of database time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_gitaly_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_gitaly_seconds, 'Seconds of Gitaly time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_queue_duration_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_queue_duration_seconds, 'Duration in seconds that a Sidekiq job was queued before being executed', {}, SIDEKIQ_QUEUE_DURATION_BUCKETS),
sidekiq_redis_requests_duration_seconds: ::Gitlab::Metrics.histogram(:sidekiq_redis_requests_duration_seconds, 'Duration in seconds that a Sidekiq job spent requests a Redis server', {}, Gitlab::Instrumentation::Redis::QUERY_TIME_BUCKETS),
sidekiq_elasticsearch_requests_duration_seconds: ::Gitlab::Metrics.histogram(:sidekiq_elasticsearch_requests_duration_seconds, 'Duration in seconds that a Sidekiq job spent in requests to an Elasticsearch server', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_failed_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_failed_total, 'Sidekiq jobs failed'),
sidekiq_jobs_retried_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_retried_total, 'Sidekiq jobs retried'),
sidekiq_jobs_interrupted_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_interrupted_total, 'Sidekiq jobs interrupted'),
sidekiq_redis_requests_total: ::Gitlab::Metrics.counter(:sidekiq_redis_requests_total, 'Redis requests during a Sidekiq job execution'),
@@ -41,6 +38,17 @@ module Gitlab
sidekiq_concurrency: ::Gitlab::Metrics.gauge(:sidekiq_concurrency, 'Maximum number of Sidekiq jobs', {}, :all),
sidekiq_mem_total_bytes: ::Gitlab::Metrics.gauge(:sidekiq_mem_total_bytes, 'Number of bytes allocated for both objects consuming an object slot and objects that required a malloc', {}, :all)
}

if Feature.enabled?(:emit_sidekiq_histogram_metrics, type: :ops)
metrics[:sidekiq_jobs_completion_seconds] = ::Gitlab::Metrics.histogram(:sidekiq_jobs_completion_seconds, 'Seconds to complete Sidekiq job', {}, SIDEKIQ_JOB_DURATION_BUCKETS)
metrics[:sidekiq_jobs_queue_duration_seconds] = ::Gitlab::Metrics.histogram(:sidekiq_jobs_queue_duration_seconds, 'Duration in seconds that a Sidekiq job was queued before being executed', {}, SIDEKIQ_QUEUE_DURATION_BUCKETS)
metrics[:sidekiq_jobs_failed_total] = ::Gitlab::Metrics.counter(:sidekiq_jobs_failed_total, 'Sidekiq jobs failed')
else
# The sum metric is still used in GitLab.com for dashboards
metrics[:sidekiq_jobs_completion_seconds_sum] = ::Gitlab::Metrics.counter(:sidekiq_jobs_completion_seconds_sum, 'Total of seconds to complete Sidekiq job')
end

metrics
end

def initialize_process_metrics
@@ -59,6 +67,8 @@ module Gitlab
base_labels = create_labels(worker_class, queue, {})
possible_sli_labels << base_labels.slice(*SIDEKIQ_SLI_LABELS)

next unless Feature.enabled?(:emit_sidekiq_histogram_metrics, type: :ops)

%w[done fail].each do |status|
metrics[:sidekiq_jobs_completion_seconds].get(base_labels.merge(job_status: status))
end
@@ -92,7 +102,8 @@ module Gitlab
def instrument(job, labels)
queue_duration = ::Gitlab::InstrumentationHelper.queue_duration_for_job(job)

@metrics[:sidekiq_jobs_queue_duration_seconds].observe(labels, queue_duration) if queue_duration
@metrics[:sidekiq_jobs_queue_duration_seconds]&.observe(labels, queue_duration) if queue_duration

@metrics[:sidekiq_running_jobs].increment(labels, 1)

if job['retry_count'].present?
@@ -119,13 +130,21 @@ module Gitlab

# sidekiq_running_jobs, sidekiq_jobs_failed_total should not include the job_status label
@metrics[:sidekiq_running_jobs].increment(labels, -1)
@metrics[:sidekiq_jobs_failed_total].increment(labels, 1) unless job_succeeded

if Feature.enabled?(:emit_sidekiq_histogram_metrics, type: :ops)
@metrics[:sidekiq_jobs_failed_total].increment(labels, 1) unless job_succeeded
else
# we don't need job_status label here
@metrics[:sidekiq_jobs_completion_seconds_sum].increment(labels, monotonic_time)
end

# job_status: done, fail match the job_status attribute in structured logging
labels[:job_status] = job_succeeded ? "done" : "fail"
instrumentation = job[:instrumentation] || {}
@metrics[:sidekiq_jobs_cpu_seconds].observe(labels, job_thread_cputime)
@metrics[:sidekiq_jobs_completion_seconds].observe(labels, monotonic_time)

@metrics[:sidekiq_jobs_completion_seconds]&.observe(labels, monotonic_time)

@metrics[:sidekiq_jobs_db_seconds].observe(labels, ActiveRecord::LogSubscriber.runtime / 1000)
@metrics[:sidekiq_jobs_gitaly_seconds].observe(labels, get_gitaly_time(instrumentation))
@metrics[:sidekiq_redis_requests_total].increment(labels, get_redis_calls(instrumentation))
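
With the ops flag off, the histogram entries are simply absent from the metrics hash, which is why the observation sites above switch to safe navigation. A small self-contained sketch of the pattern (hypothetical stand-ins, not GitLab's real classes):

# Hypothetical stand-ins illustrating the safe-navigation guard: a metric
# registered only behind a flag is nil in the hash when the flag is off,
# so plain `.observe` would raise NoMethodError while `&.observe` is a no-op.
Histogram = Struct.new(:name) do
  def observe(value)
    puts "#{name} observed #{value}"
  end
end

def build_metrics(histograms_enabled:)
  metrics = { running_jobs: Histogram.new(:running_jobs) }
  metrics[:completion_seconds] = Histogram.new(:completion_seconds) if histograms_enabled
  metrics
end

metrics = build_metrics(histograms_enabled: false)
metrics[:running_jobs].observe(1)          # always registered
metrics[:completion_seconds]&.observe(2.5) # no-op when the flag is off
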
@@ -9,24 +9,24 @@ module Gitlab
EDIT_CATEGORY = 'ide_edit'

class << self
def track_web_ide_edit_action(author:, time: Time.zone.now, project:)
track_unique_action(EDIT_BY_WEB_IDE, author, time, project)
def track_web_ide_edit_action(author:, project:)
track_internal_event(EDIT_BY_WEB_IDE, author, project)
end

def count_web_ide_edit_actions(date_from:, date_to:)
count_unique(EDIT_BY_WEB_IDE, date_from, date_to)
end

def track_sfe_edit_action(author:, time: Time.zone.now, project:)
track_unique_action(EDIT_BY_SFE, author, time, project)
def track_sfe_edit_action(author:, project:)
track_internal_event(EDIT_BY_SFE, author, project)
end

def count_sfe_edit_actions(date_from:, date_to:)
count_unique(EDIT_BY_SFE, date_from, date_to)
end

def track_snippet_editor_edit_action(author:, time: Time.zone.now, project:)
track_unique_action(EDIT_BY_SNIPPET_EDITOR, author, time, project)
def track_snippet_editor_edit_action(author:, project:)
track_internal_event(EDIT_BY_SNIPPET_EDITOR, author, project)
end

def count_snippet_editor_edit_actions(date_from:, date_to:)
@@ -35,21 +35,15 @@ module Gitlab

private

def track_unique_action(event_name, author, time, project = nil)
def track_internal_event(event_name, author, project = nil)
return unless author

Gitlab::Tracking.event(
name,
'ide_edit',
property: event_name.to_s,
project: project,
namespace: project&.namespace,
Gitlab::InternalEvents.track_event(
event_name,
user: author,
label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit',
context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context]
project: project,
namespace: project&.namespace
)

Gitlab::UsageDataCounters::HLLRedisCounter.track_event(event_name, values: author.id, time: time)
end

def count_unique(actions, date_from, date_to)
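
The replacement entry point collapses the separate Snowplow and Redis-HLL calls into one keyword-argument invocation, deriving the namespace from the project as the diff above shows. A hedged usage sketch (the event name and the user/project objects are placeholders, not values from this commit):

# Placeholder event name and objects; the call shape mirrors the
# Gitlab::InternalEvents.track_event invocation introduced above.
Gitlab::InternalEvents.track_event(
  'g_edit_by_web_ide',
  user: current_user,
  project: project,
  namespace: project&.namespace
)
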
@@ -895,9 +895,6 @@ msgstr ""
msgid "%{listToShow}, and %{awardsListLength} more"
msgstr ""

msgid "%{location} is missing required keys: %{keys}"
msgstr ""

msgid "%{lock_path} is locked by GitLab User %{lock_user_id}"
msgstr ""
@@ -1334,24 +1331,6 @@ msgstr ""
msgid "%{wildcards_link_start}Wildcards%{wildcards_link_end} such as %{code_tag_start}v*%{code_tag_end} or %{code_tag_start}*-release%{code_tag_end} are supported."
msgstr ""

msgid "'%{data}' at %{location} does not match format: %{format}"
msgstr ""

msgid "'%{data}' at %{location} does not match pattern: %{pattern}"
msgstr ""

msgid "'%{data}' at %{location} is invalid: error_type=%{type}"
msgstr ""

msgid "'%{data}' at %{location} is not of type: %{type}"
msgstr ""

msgid "'%{data}' at %{location} is not one of: %{enum}"
msgstr ""

msgid "'%{data}' at %{location} is not: %{const}"
msgstr ""

msgid "'%{level}' is not a valid visibility level"
msgstr ""
@@ -8181,9 +8160,6 @@ msgstr ""
msgid "Both SSH and HTTP(S)"
msgstr ""

msgid "Both project and dashboard_path are required"
msgstr ""

msgid "Branch"
msgstr ""
@@ -8577,6 +8553,9 @@ msgstr ""
msgid "BroadcastMessages|Indigo"
msgstr ""

msgid "BroadcastMessages|Leave blank to target all group and project pages."
msgstr ""

msgid "BroadcastMessages|Light"
msgstr ""
@@ -8604,12 +8583,18 @@ msgstr ""
msgid "BroadcastMessages|Notification"
msgstr ""

msgid "BroadcastMessages|Paths can contain wildcards, like */welcome"
msgid "BroadcastMessages|One or more roles is required."
msgstr ""

msgid "BroadcastMessages|Paths can contain wildcards, like */welcome."
msgstr ""

msgid "BroadcastMessages|Red"
msgstr ""

msgid "BroadcastMessages|Select at least one role."
msgstr ""

msgid "BroadcastMessages|Show only to users who have specific roles on groups/project pages"
msgstr ""
@@ -9945,12 +9930,18 @@ msgstr ""
msgid "CiCatalog|About this project"
msgstr ""

msgid "CiCatalog|Back to the CI/CD Catalog"
msgstr ""

msgid "CiCatalog|CI/CD Catalog"
msgstr ""

msgid "CiCatalog|CI/CD Catalog resource"
msgstr ""

msgid "CiCatalog|Component ID not found, or you do not have permission to access component."
msgstr ""

msgid "CiCatalog|Create a pipeline component repository and make reusing pipeline configurations faster and easier."
msgstr ""
@@ -9969,6 +9960,9 @@ msgstr ""
msgid "CiCatalog|Mark project as a CI/CD Catalog resource. %{linkStart}What is the CI/CD Catalog?%{linkEnd}"
msgstr ""

msgid "CiCatalog|No component available"
msgstr ""

msgid "CiCatalog|No release available"
msgstr ""
@@ -13127,6 +13121,39 @@ msgstr ""
msgid "ContributionEvent|Removed due to membership expiration from %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened Epic %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened incident %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened issue %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened key result %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened merge request %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened milestone %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened objective %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened requirement %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened resource."
msgstr ""

msgid "ContributionEvent|Reopened task %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|Reopened test case %{targetLink} in %{resourceParentLink}."
msgstr ""

msgid "ContributionEvent|…and %{count} more commits. %{linkStart}Compare%{linkEnd}."
msgstr ""
@@ -55905,9 +55932,6 @@ msgstr[1] ""
msgid "mergedCommitsAdded| (commits were squashed)"
msgstr ""

msgid "metric_id must be unique across a project"
msgstr ""

msgid "milestone"
msgstr ""
@@ -252,7 +252,7 @@
"custom-jquery-matchers": "^2.1.0",
"eslint": "8.46.0",
"eslint-import-resolver-jest": "3.0.2",
"eslint-import-resolver-webpack": "0.13.2",
"eslint-import-resolver-webpack": "0.13.4",
"eslint-plugin-import": "^2.28.0",
"eslint-plugin-no-jquery": "2.7.0",
"eslint-plugin-no-unsanitized": "^4.0.2",
@@ -478,5 +478,15 @@ FactoryBot.define do
artifact.file_sha256 = Digest::SHA256.file(artifact.file.path).hexdigest
end
end

trait :annotations do
file_type { :annotations }
file_format { :gzip }

after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/gl-annotations.json.gz'), 'application/x-gzip')
end
end
end
end
Binary file not shown.
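
Assuming the enclosing factory is the CI job artifact factory (its name is not shown in this excerpt), the new trait would be exercised along these lines:

# Hypothetical usage; :ci_job_artifact is an assumption about the
# enclosing FactoryBot factory name, not confirmed by this diff.
artifact = create(:ci_job_artifact, :annotations)
artifact.file_type   # => "annotations"
artifact.file_format # => "gzip"
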
@@ -44,6 +44,7 @@ describe('MessageForm', () => {
const findShowInCli = () => wrapper.findComponent('[data-testid=show-in-cli-checkbox]');
const findTargetSelect = () => wrapper.findComponent('[data-testid=target-select]');
const findTargetPath = () => wrapper.findComponent('[data-testid=target-path-input]');
const emitSubmitForm = () => findForm().vm.$emit('submit', { preventDefault: () => {} });

function createComponent({ broadcastMessage = {} } = {}) {
wrapper = mount(MessageForm, {
@@ -79,7 +80,7 @@ describe('MessageForm', () => {

it('renders the placeholder text when the user message is blank', () => {
createComponent({ broadcastMessage: { message: ' ' } });
expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.messagePlaceholder);
expect(wrapper.text()).toContain(MessageForm.i18n.messagePlaceholder);
});
});

@@ -129,13 +130,18 @@ describe('MessageForm', () => {

it('triggers displaying target path and target roles when selecting different options', async () => {
createComponent();
const targetPath = findTargetPath();
const options = findTargetSelect().findAll('option');
await options.at(1).setSelected();
expect(findTargetPath().isVisible()).toBe(true);
expect(targetPath.isVisible()).toBe(true);
expect(targetPath.text()).toContain(MessageForm.i18n.targetPathDescription);
expect(targetPath.text()).not.toContain(MessageForm.i18n.targetPathWithRolesReminder);
expect(findTargetRoles().isVisible()).toBe(false);

await options.at(2).setSelected();
expect(findTargetPath().isVisible()).toBe(true);
expect(targetPath.isVisible()).toBe(true);
expect(targetPath.text()).toContain(MessageForm.i18n.targetPathDescription);
expect(targetPath.text()).toContain(MessageForm.i18n.targetPathWithRolesReminder);
expect(findTargetRoles().isVisible()).toBe(true);
});

@@ -157,12 +163,12 @@ describe('MessageForm', () => {
describe('form submit button', () => {
it('renders the "add" text when the message is not persisted', () => {
createComponent({ broadcastMessage: { id: undefined } });
expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.add);
expect(wrapper.text()).toContain(MessageForm.i18n.add);
});

it('renders the "update" text when the message is persisted', () => {
createComponent({ broadcastMessage: { id: 100 } });
expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.update);
expect(wrapper.text()).toContain(MessageForm.i18n.update);
});

it('is disabled when the user message is blank', () => {
@@ -196,56 +202,86 @@ describe('MessageForm', () => {
ends_at: defaultProps.endsAt,
};

it('sends a create request for a new message form', async () => {
createComponent({ broadcastMessage: { id: undefined } });
findForm().vm.$emit('submit', { preventDefault: () => {} });
await waitForPromises();
describe('when creating a new message', () => {
beforeEach(() => {
createComponent({ broadcastMessage: { id: undefined } });
});

expect(axiosMock.history.post).toHaveLength(2);
expect(axiosMock.history.post[1]).toMatchObject({
url: messagesPath,
data: JSON.stringify(defaultPayload),
it('sends a create request for a new message form', async () => {
emitSubmitForm();
await waitForPromises();

expect(axiosMock.history.post).toHaveLength(2);
expect(axiosMock.history.post[1]).toMatchObject({
url: messagesPath,
data: JSON.stringify(defaultPayload),
});
});

it('shows an error alert if the create request fails', async () => {
axiosMock.onPost(messagesPath).replyOnce(HTTP_STATUS_BAD_REQUEST);
emitSubmitForm();
await waitForPromises();

expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: MessageForm.i18n.addError,
}),
);
});
});

it('shows an error alert if the create request fails', async () => {
createComponent({ broadcastMessage: { id: undefined } });
axiosMock.onPost(messagesPath).replyOnce(HTTP_STATUS_BAD_REQUEST);
findForm().vm.$emit('submit', { preventDefault: () => {} });
await waitForPromises();
describe('when editing an existing message', () => {
const mockId = 1337;

expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: wrapper.vm.$options.i18n.addError,
}),
);
});

it('sends an update request for a persisted message form', async () => {
const id = 1337;
createComponent({ broadcastMessage: { id } });
findForm().vm.$emit('submit', { preventDefault: () => {} });
await waitForPromises();

expect(axiosMock.history.patch).toHaveLength(1);
expect(axiosMock.history.patch[0]).toMatchObject({
url: `${messagesPath}/${id}`,
data: JSON.stringify(defaultPayload),
beforeEach(() => {
createComponent({ broadcastMessage: { id: mockId } });
});
});

it('shows an error alert if the update request fails', async () => {
const id = 1337;
createComponent({ broadcastMessage: { id } });
axiosMock.onPost(`${messagesPath}/${id}`).replyOnce(HTTP_STATUS_BAD_REQUEST);
findForm().vm.$emit('submit', { preventDefault: () => {} });
await waitForPromises();
it('sends an update request for a persisted message form', async () => {
emitSubmitForm();
await waitForPromises();

expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: wrapper.vm.$options.i18n.updateError,
}),
);
expect(axiosMock.history.patch).toHaveLength(1);
expect(axiosMock.history.patch[0]).toMatchObject({
url: `${messagesPath}/${mockId}`,
data: JSON.stringify(defaultPayload),
});
});

it('shows an error alert if the update request fails', async () => {
axiosMock.onPost(`${messagesPath}/${mockId}`).replyOnce(HTTP_STATUS_BAD_REQUEST);
emitSubmitForm();
await waitForPromises();

expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: MessageForm.i18n.updateError,
}),
);
});

it('does not submit if target roles is required, and later does submit when validation is corrected', async () => {
const options = findTargetSelect().findAll('option');
await options.at(2).setSelected();

emitSubmitForm();
await waitForPromises();

expect(axiosMock.history.patch).toHaveLength(0);
expect(wrapper.text()).toContain(MessageForm.i18n.targetRolesValidationMsg);

await findTargetRoles().find('input[type="checkbox"]').setChecked();

emitSubmitForm();
await waitForPromises();

expect(axiosMock.history.patch).toHaveLength(1);
expect(axiosMock.history.patch[0]).toMatchObject({
url: `${messagesPath}/${mockId}`,
data: JSON.stringify({ ...defaultPayload, target_access_levels: [10] }),
});
});
});
});
});
@@ -0,0 +1,60 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ContributionEventReopened from '~/contribution_events/components/contribution_event/contribution_event_reopened.vue';
import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
import { TARGET_TYPE_WORK_ITEM } from '~/contribution_events/constants';
import {
eventMilestoneReopened,
eventIssueReopened,
eventMergeRequestReopened,
eventTaskReopened,
eventIncidentReopened,
} from '../../utils';

describe('ContributionEventReopened', () => {
let wrapper;

const createComponent = ({ propsData }) => {
wrapper = shallowMountExtended(ContributionEventReopened, {
propsData,
});
};

describe.each`
event | expectedMessage | iconName
${eventMilestoneReopened()} | ${'Reopened milestone %{targetLink} in %{resourceParentLink}.'} | ${'status_open'}
${eventIssueReopened()} | ${'Reopened issue %{targetLink} in %{resourceParentLink}.'} | ${'status_open'}
${eventMergeRequestReopened()} | ${'Reopened merge request %{targetLink} in %{resourceParentLink}.'} | ${'merge-request-open'}
${{ target: { type: 'unsupported type' } }} | ${'Reopened resource.'} | ${'status_open'}
`('when event target type is $event.target.type', ({ event, expectedMessage, iconName }) => {
it('renders `ContributionEventBase` with correct props', () => {
createComponent({ propsData: { event } });

expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
event,
message: expectedMessage,
iconName,
iconClass: 'gl-text-green-500',
});
});
});

describe(`when event target type is ${TARGET_TYPE_WORK_ITEM}`, () => {
describe.each`
event | expectedMessage
${eventTaskReopened()} | ${'Reopened task %{targetLink} in %{resourceParentLink}.'}
${eventIncidentReopened()} | ${'Reopened incident %{targetLink} in %{resourceParentLink}.'}
${{ target: { type: TARGET_TYPE_WORK_ITEM, issue_type: 'unsupported type' } }} | ${'Reopened resource.'}
`('when issue type is $event.target.issue_type', ({ event, expectedMessage }) => {
it('renders `ContributionEventBase` with correct props', () => {
createComponent({ propsData: { event } });

expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
event,
message: expectedMessage,
iconName: 'status_open',
iconClass: 'gl-text-green-500',
});
});
});
});
});
@@ -9,6 +9,7 @@ import ContributionEventPrivate from '~/contribution_events/components/contribut
import ContributionEventMerged from '~/contribution_events/components/contribution_event/contribution_event_merged.vue';
import ContributionEventCreated from '~/contribution_events/components/contribution_event/contribution_event_created.vue';
import ContributionEventClosed from '~/contribution_events/components/contribution_event/contribution_event_closed.vue';
import ContributionEventReopened from '~/contribution_events/components/contribution_event/contribution_event_reopened.vue';
import {
eventApproved,
eventExpired,

@@ -19,6 +20,7 @@ import {
eventMerged,
eventCreated,
eventClosed,
eventReopened,
} from '../utils';

describe('ContributionEvents', () => {

@@ -37,6 +39,7 @@ describe('ContributionEvents', () => {
eventMerged(),
eventCreated(),
eventClosed(),
eventReopened(),
],
},
});

@@ -53,6 +56,7 @@ describe('ContributionEvents', () => {
${ContributionEventMerged} | ${eventMerged()}
${ContributionEventCreated} | ${eventCreated()}
${ContributionEventClosed} | ${eventClosed()}
${ContributionEventReopened} | ${eventReopened()}
`(
'renders `$expectedComponent.name` component and passes expected event',
({ expectedComponent, expectedEvent }) => {
@@ -8,6 +8,7 @@ import {
EVENT_TYPE_PRIVATE,
EVENT_TYPE_MERGED,
EVENT_TYPE_CLOSED,
EVENT_TYPE_REOPENED,
PUSH_EVENT_REF_TYPE_BRANCH,
PUSH_EVENT_REF_TYPE_TAG,
EVENT_TYPE_CREATED,

@@ -16,7 +17,7 @@ import {
TARGET_TYPE_MERGE_REQUEST,
TARGET_TYPE_WIKI,
TARGET_TYPE_DESIGN,
TARGET_TYPE_WORK_ITEM,
WORK_ITEM_ISSUE_TYPE_ISSUE,
WORK_ITEM_ISSUE_TYPE_TASK,
WORK_ITEM_ISSUE_TYPE_INCIDENT,
} from '~/contribution_events/constants';

@@ -25,12 +26,7 @@ const findEventByAction = (action) => () => events.find((event) => event.action
const findEventByActionAndTargetType = (action, targetType) => () =>
events.find((event) => event.action === action && event.target?.type === targetType);
const findEventByActionAndIssueType = (action, issueType) => () =>
events.find(
(event) =>
event.action === action &&
event.target?.type === TARGET_TYPE_WORK_ITEM &&
event.target.issue_type === issueType,
);
events.find((event) => event.action === action && event.target.issue_type === issueType);

export const eventApproved = findEventByAction(EVENT_TYPE_APPROVED);

@@ -100,3 +96,18 @@ export const eventWikiPageClosed = findClosedEvent(TARGET_TYPE_WIKI);
export const eventDesignClosed = findClosedEvent(TARGET_TYPE_DESIGN);
export const eventTaskClosed = findWorkItemClosedEvent(WORK_ITEM_ISSUE_TYPE_TASK);
export const eventIncidentClosed = findWorkItemClosedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT);

export const eventReopened = findEventByAction(EVENT_TYPE_REOPENED);

export const findReopenedEvent = (targetType) =>
findEventByActionAndTargetType(EVENT_TYPE_REOPENED, targetType);
export const findWorkItemReopenedEvent = (issueType) =>
findEventByActionAndIssueType(EVENT_TYPE_REOPENED, issueType);

export const eventMilestoneReopened = findReopenedEvent(TARGET_TYPE_MILESTONE);
export const eventMergeRequestReopened = findReopenedEvent(TARGET_TYPE_MERGE_REQUEST);
export const eventWikiPageReopened = findReopenedEvent(TARGET_TYPE_WIKI);
export const eventDesignReopened = findReopenedEvent(TARGET_TYPE_DESIGN);
export const eventIssueReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_ISSUE);
export const eventTaskReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_TASK);
export const eventIncidentReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT);
@@ -0,0 +1,24 @@
import { TARGET_TYPE_MILESTONE, WORK_ITEM_ISSUE_TYPE_TASK } from '~/contribution_events/constants';
import { getValueByEventTarget } from '~/contribution_events/utils';
import { eventMilestoneCreated, eventTaskCreated } from './utils';

describe('getValueByEventTarget', () => {
const milestoneValue = 'milestone';
const taskValue = 'task';
const fallbackValue = 'fallback';

const map = {
[TARGET_TYPE_MILESTONE]: milestoneValue,
[WORK_ITEM_ISSUE_TYPE_TASK]: taskValue,
fallback: fallbackValue,
};

it.each`
event | expected
${eventMilestoneCreated()} | ${milestoneValue}
${eventTaskCreated()} | ${taskValue}
${{ target: { type: 'unsupported type' } }} | ${fallbackValue}
`('returns $expected when event is $event', ({ event, expected }) => {
expect(getValueByEventTarget(map, event)).toBe(expected);
});
});
@@ -7,7 +7,6 @@ import EditEnvironment from '~/environments/components/edit_environment.vue';
import { createAlert } from '~/alert';
import { visitUrl } from '~/lib/utils/url_utility';
import getEnvironment from '~/environments/graphql/queries/environment.query.graphql';
import getEnvironmentWithNamespace from '~/environments/graphql/queries/environment_with_namespace.graphql';
import updateEnvironment from '~/environments/graphql/mutations/update_environment.mutation.graphql';
import { __ } from '~/locale';
import createMockApollo from '../__helpers__/mock_apollo_helper';

@@ -43,9 +42,6 @@ describe('~/environments/components/edit.vue', () => {
let wrapper;

const getEnvironmentQuery = jest.fn().mockResolvedValue({ data: resolvedEnvironment });
const getEnvironmentWithNamespaceQuery = jest
.fn()
.mockResolvedValue({ data: resolvedEnvironment });

const updateEnvironmentSuccess = jest
.fn()

@@ -59,24 +55,17 @@ describe('~/environments/components/edit.vue', () => {

const mocks = [
[getEnvironment, getEnvironmentQuery],
[getEnvironmentWithNamespace, getEnvironmentWithNamespaceQuery],
[updateEnvironment, mutationHandler],
];

return createMockApollo(mocks);
};

const createWrapperWithApollo = async ({
mutationHandler = updateEnvironmentSuccess,
kubernetesNamespaceForEnvironment = false,
} = {}) => {
const createWrapperWithApollo = async ({ mutationHandler = updateEnvironmentSuccess } = {}) => {
wrapper = mountExtended(EditEnvironment, {
propsData: { environment: {} },
provide: {
...provide,
glFeatures: {
kubernetesNamespaceForEnvironment,
},
},
apolloProvider: createMockApolloProvider(mutationHandler),
});

@@ -169,11 +158,4 @@ describe('~/environments/components/edit.vue', () => {
});
});
});

describe('when `kubernetesNamespaceForEnvironment` is enabled', () => {
it('calls the `getEnvironmentWithNamespace` query', () => {
createWrapperWithApollo({ kubernetesNamespaceForEnvironment: true });
expect(getEnvironmentWithNamespaceQuery).toHaveBeenCalled();
});
});
});
@@ -42,11 +42,7 @@ describe('~/environments/components/form.vue', () => {
},
});

const createWrapperWithApollo = ({
propsData = {},
kubernetesNamespaceForEnvironment = false,
queryResult = null,
} = {}) => {
const createWrapperWithApollo = ({ propsData = {}, queryResult = null } = {}) => {
Vue.use(VueApollo);

const requestHandlers = [

@@ -72,9 +68,6 @@ describe('~/environments/components/form.vue', () => {
return mountExtended(EnvironmentForm, {
provide: {
...PROVIDE,
glFeatures: {
kubernetesNamespaceForEnvironment,
},
},
propsData: {
...DEFAULT_PROPS,
@@ -296,127 +289,117 @@ describe('~/environments/components/form.vue', () => {
});

describe('namespace selector', () => {
it("doesn't render namespace selector if `kubernetesNamespaceForEnvironment` feature flag is disabled", () => {
beforeEach(() => {
wrapper = createWrapperWithApollo();
});

it("doesn't render namespace selector by default", () => {
expect(findNamespaceSelector().exists()).toBe(false);
});

describe('when `kubernetesNamespaceForEnvironment` feature flag is enabled', () => {
beforeEach(() => {
wrapper = createWrapperWithApollo({
kubernetesNamespaceForEnvironment: true,
});
describe('when the agent was selected', () => {
beforeEach(async () => {
await selectAgent();
});

it("doesn't render namespace selector by default", () => {
it('renders namespace selector', () => {
expect(findNamespaceSelector().exists()).toBe(true);
});

it('requests the kubernetes namespaces with the correct configuration', async () => {
const configuration = {
basePath: mockKasTunnelUrl.replace(/\/$/, ''),
baseOptions: {
headers: {
'GitLab-Agent-Id': 2,
},
withCredentials: true,
},
};

await waitForPromises();

expect(getNamespacesQueryResult).toHaveBeenCalledWith(
{},
{ configuration },
expect.anything(),
expect.anything(),
);
});

it('sets the loading prop while fetching the list', async () => {
expect(findNamespaceSelector().props('loading')).toBe(true);

await waitForPromises();

expect(findNamespaceSelector().props('loading')).toBe(false);
});

it('renders a list of available namespaces', async () => {
await waitForPromises();

expect(findNamespaceSelector().props('items')).toEqual([
{ text: 'default', value: 'default' },
{ text: 'agent', value: 'agent' },
]);
});

it('filters the namespaces list on user search', async () => {
await waitForPromises();
await findNamespaceSelector().vm.$emit('search', 'default');

expect(findNamespaceSelector().props('items')).toEqual([
{ value: 'default', text: 'default' },
]);
});

it('updates namespace selector field with the name of selected namespace', async () => {
await waitForPromises();
await findNamespaceSelector().vm.$emit('select', 'agent');

expect(findNamespaceSelector().props('toggleText')).toBe('agent');
});

it('emits changes to the kubernetesNamespace', async () => {
await waitForPromises();
await findNamespaceSelector().vm.$emit('select', 'agent');

expect(wrapper.emitted('change')[1]).toEqual([
{ name: '', externalUrl: '', kubernetesNamespace: 'agent' },
]);
});

it('clears namespace selector when another agent was selected', async () => {
await waitForPromises();
await findNamespaceSelector().vm.$emit('select', 'agent');

expect(findNamespaceSelector().props('toggleText')).toBe('agent');

await findAgentSelector().vm.$emit('select', '1');
expect(findNamespaceSelector().props('toggleText')).toBe(
EnvironmentForm.i18n.namespaceHelpText,
);
});
});

describe('when cannot connect to the cluster', () => {
const error = new Error('Error from the cluster_client API');

beforeEach(async () => {
wrapper = createWrapperWithApollo({
queryResult: jest.fn().mockRejectedValueOnce(error),
});

await selectAgent();
await waitForPromises();
});

it("doesn't render the namespace selector", () => {
expect(findNamespaceSelector().exists()).toBe(false);
});

describe('when the agent was selected', () => {
beforeEach(async () => {
await selectAgent();
});

it('renders namespace selector', () => {
expect(findNamespaceSelector().exists()).toBe(true);
});

it('requests the kubernetes namespaces with the correct configuration', async () => {
const configuration = {
basePath: mockKasTunnelUrl.replace(/\/$/, ''),
baseOptions: {
headers: {
'GitLab-Agent-Id': 2,
},
withCredentials: true,
},
};

await waitForPromises();

expect(getNamespacesQueryResult).toHaveBeenCalledWith(
{},
{ configuration },
expect.anything(),
expect.anything(),
);
});

it('sets the loading prop while fetching the list', async () => {
expect(findNamespaceSelector().props('loading')).toBe(true);

await waitForPromises();

expect(findNamespaceSelector().props('loading')).toBe(false);
});

it('renders a list of available namespaces', async () => {
await waitForPromises();

expect(findNamespaceSelector().props('items')).toEqual([
{ text: 'default', value: 'default' },
{ text: 'agent', value: 'agent' },
]);
});

it('filters the namespaces list on user search', async () => {
await waitForPromises();
await findNamespaceSelector().vm.$emit('search', 'default');

expect(findNamespaceSelector().props('items')).toEqual([
{ value: 'default', text: 'default' },
]);
});

it('updates namespace selector field with the name of selected namespace', async () => {
await waitForPromises();
await findNamespaceSelector().vm.$emit('select', 'agent');

expect(findNamespaceSelector().props('toggleText')).toBe('agent');
});

it('emits changes to the kubernetesNamespace', async () => {
await waitForPromises();
await findNamespaceSelector().vm.$emit('select', 'agent');

expect(wrapper.emitted('change')[1]).toEqual([
{ name: '', externalUrl: '', kubernetesNamespace: 'agent' },
]);
});

it('clears namespace selector when another agent was selected', async () => {
await waitForPromises();
await findNamespaceSelector().vm.$emit('select', 'agent');

expect(findNamespaceSelector().props('toggleText')).toBe('agent');

await findAgentSelector().vm.$emit('select', '1');
expect(findNamespaceSelector().props('toggleText')).toBe(
EnvironmentForm.i18n.namespaceHelpText,
);
});
});

describe('when cannot connect to the cluster', () => {
const error = new Error('Error from the cluster_client API');

beforeEach(async () => {
wrapper = createWrapperWithApollo({
kubernetesNamespaceForEnvironment: true,
queryResult: jest.fn().mockRejectedValueOnce(error),
});

await selectAgent();
await waitForPromises();
});

it("doesn't render the namespace selector", () => {
expect(findNamespaceSelector().exists()).toBe(false);
});

it('renders an alert', () => {
expect(findAlert().text()).toBe('Error from the cluster_client API');
});
it('renders an alert', () => {
expect(findAlert().text()).toBe('Error from the cluster_client API');
});
});
});
@@ -430,7 +413,6 @@ describe('~/environments/components/form.vue', () => {
beforeEach(() => {
wrapper = createWrapperWithApollo({
propsData: { environment: environmentWithAgent },
kubernetesNamespaceForEnvironment: true,
});
});

@@ -463,7 +445,6 @@ describe('~/environments/components/form.vue', () => {
beforeEach(() => {
wrapper = createWrapperWithApollo({
propsData: { environment: environmentWithAgentAndNamespace },
kubernetesNamespaceForEnvironment: true,
});
});
@@ -13,7 +13,6 @@ import Deployment from '~/environments/components/deployment.vue';
import DeployBoardWrapper from '~/environments/components/deploy_board_wrapper.vue';
import KubernetesOverview from '~/environments/components/kubernetes_overview.vue';
import getEnvironmentClusterAgent from '~/environments/graphql/queries/environment_cluster_agent.query.graphql';
import getEnvironmentClusterAgentWithNamespace from '~/environments/graphql/queries/environment_cluster_agent_with_namespace.query.graphql';
import { resolvedEnvironment, rolloutStatus, agent } from './graphql/mock_data';
import { mockKasTunnelUrl } from './mock_data';

@@ -22,7 +21,6 @@ Vue.use(VueApollo);
describe('~/environments/components/new_environment_item.vue', () => {
let wrapper;
let queryResponseHandler;
let queryWithNamespaceResponseHandler;

const projectPath = '/1';

@@ -33,27 +31,15 @@ describe('~/environments/components/new_environment_item.vue', () => {
id: '1',
environment: {
id: '1',
kubernetesNamespace: 'default',
clusterAgent,
},
},
},
};
queryResponseHandler = jest.fn().mockResolvedValue(response);
queryWithNamespaceResponseHandler = jest.fn().mockResolvedValue({
data: {
project: {
id: response.data.project.id,
environment: {
...response.data.project.environment,
kubernetesNamespace: 'default',
},
},
},
});
return createMockApollo([
[getEnvironmentClusterAgent, queryResponseHandler],
[getEnvironmentClusterAgentWithNamespace, queryWithNamespaceResponseHandler],
]);

return createMockApollo([[getEnvironmentClusterAgent, queryResponseHandler]]);
};

const createWrapper = ({ propsData = {}, provideData = {}, apolloProvider } = {}) =>
@@ -548,25 +534,6 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
});

it('should request agent data with kubernetes namespace when `kubernetesNamespaceForEnvironment` feature flag is enabled', async () => {
wrapper = createWrapper({
propsData: { environment: resolvedEnvironment },
provideData: {
glFeatures: {
kubernetesNamespaceForEnvironment: true,
},
},
apolloProvider: createApolloProvider(agent),
});

await expandCollapsedSection();

expect(queryWithNamespaceResponseHandler).toHaveBeenCalledWith({
environmentName: resolvedEnvironment.name,
projectFullPath: projectPath,
});
});

it('should render if the environment has an agent associated', async () => {
wrapper = createWrapper({
propsData: { environment: resolvedEnvironment },

@@ -579,26 +546,6 @@ describe('~/environments/components/new_environment_item.vue', () => {
expect(findKubernetesOverview().props()).toMatchObject({
clusterAgent: agent,
environmentName: resolvedEnvironment.name,
});
});

it('should render with the namespace if `kubernetesNamespaceForEnvironment` feature flag is enabled and the environment has an agent associated', async () => {
wrapper = createWrapper({
propsData: { environment: resolvedEnvironment },
provideData: {
glFeatures: {
kubernetesNamespaceForEnvironment: true,
},
},
apolloProvider: createApolloProvider(agent),
});

await expandCollapsedSection();
await waitForPromises();

expect(findKubernetesOverview().props()).toEqual({
clusterAgent: agent,
environmentName: resolvedEnvironment.name,
namespace: 'default',
});
});
@@ -44,7 +44,7 @@ RSpec.describe 'Users (JavaScript fixtures)', feature_category: :user_profile do
end

it 'controller/users/activity.json' do
get :activity, params: { username: user.username, limit: 50 }, format: :json
get :activity, params: { username: user.username, limit: 100 }, format: :json

expect(response).to be_successful
end
@@ -7,15 +7,22 @@ import LineHighlighter from '~/blob/line_highlighter';
import addBlobLinksTracking from '~/blob/blob_links_tracking';
import { BLOB_DATA_MOCK, CHUNK_1, CHUNK_2, LANGUAGE_MOCK } from './mock_data';

jest.mock('~/blob/line_highlighter');
const lineHighlighter = new LineHighlighter();
jest.mock('~/blob/line_highlighter', () =>
jest.fn().mockReturnValue({
highlightHash: jest.fn(),
}),
);
jest.mock('~/blob/blob_links_tracking');

describe('Source Viewer component', () => {
let wrapper;
const CHUNKS_MOCK = [CHUNK_1, CHUNK_2];
const hash = '#L142';

const createComponent = () => {
wrapper = shallowMountExtended(SourceViewer, {
mocks: { $route: { hash } },
propsData: { blob: BLOB_DATA_MOCK, chunks: CHUNKS_MOCK },
});
};

@@ -48,4 +55,10 @@ describe('Source Viewer component', () => {
expect(findChunks().at(1).props()).toMatchObject(CHUNK_2);
});
});

describe('hash highlighting', () => {
it('calls highlightHash with expected parameter', () => {
expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
});
});
});
@@ -4,6 +4,7 @@ require 'spec_helper'

RSpec.describe Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :small_repo) }
let(:repository) { project.repository }
let(:file_size_limit) { 1 }
let(:any_quarantined_blobs) do
described_class.new(

@@ -37,7 +38,7 @@ RSpec.describe Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs, feature_category: :source_code_management do
end

before do
allow(Gitlab::Git::HookEnv).to receive(:all).with(project.repository.gl_repository).and_return(git_env)
allow(Gitlab::Git::HookEnv).to receive(:all).with(repository.gl_repository).and_return(git_env)
end

it 'returns an empty array' do

@@ -47,9 +48,25 @@ RSpec.describe Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs, feature_category: :source_code_management do
context 'when the file is over the limit' do
let(:file_size_limit) { 0 }

it 'returns an array with the blobs that are over the limit' do
expect(subject.size).to eq(1)
expect(subject.first).to be_kind_of(Gitlab::Git::Blob)
context 'when the blob does not exist in the repo' do
before do
allow(repository.gitaly_commit_client).to receive(:object_existence_map).and_return(Hash.new { false })
end

it 'returns an array with the blobs that are over the limit' do
expect(subject.size).to eq(1)
expect(subject.first).to be_kind_of(Gitlab::Git::Blob)
end
end

context 'when the blob exists in the repo' do
before do
allow(repository.gitaly_commit_client).to receive(:object_existence_map).and_return(Hash.new { true })
end

it 'filters out the blobs in the repo' do
expect(subject).to eq([])
end
end
end
end
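
The new contexts hinge on object_existence_map: oversized blobs that already exist in the repository are filtered out, so only newly pushed oversized blobs trip the check. A toy sketch of that filtering rule (hypothetical data and helper, not the real check):

# Toy stand-in for the oversize check's filtering step: keep only blobs
# over the limit that the existence map says are NOT already in the repo.
Blob = Struct.new(:id, :size)

def oversized_new_blobs(blobs, existence_map, limit_bytes)
  blobs.select { |b| b.size > limit_bytes && !existence_map[b.id] }
end

blobs = [Blob.new('a', 5), Blob.new('b', 50)]
existing = Hash.new(false).merge('b' => true)

oversized_new_blobs(blobs, existing, 10)                   # => [] ('b' already exists)
oversized_new_blobs(blobs, Hash.new(false), 10).map(&:id)  # => ["b"]
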
@@ -48,6 +48,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports, feature_category: :pipeline_c
:terraform | 'tfplan.json'
:accessibility | 'gl-accessibility.json'
:cyclonedx | 'gl-sbom.cdx.zip'
:annotations | 'gl-annotations.json'
end

with_them do

@@ -433,6 +433,7 @@ builds:
- dast_scanner_profiles_build
- dast_scanner_profile
- job_annotations
- job_artifacts_annotations
bridges:
- user
- pipeline
@@ -1,99 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Metrics::Dashboard::Transformers::Yml::V1::PrometheusMetrics do
include MetricsDashboardHelpers

describe '#execute' do
subject { described_class.new(dashboard_hash) }

context 'valid dashboard' do
let_it_be(:dashboard_hash) do
{
panel_groups: [{
panels: [
{
title: 'Panel 1 title',
y_label: 'Panel 1 y_label',
metrics: [
{
query_range: 'Panel 1 metric 1 query_range',
unit: 'Panel 1 metric 1 unit',
label: 'Panel 1 metric 1 label',
id: 'Panel 1 metric 1 id'
},
{
query: 'Panel 1 metric 2 query',
unit: 'Panel 1 metric 2 unit',
label: 'Panel 1 metric 2 label',
id: 'Panel 1 metric 2 id'
}
]
},
{
title: 'Panel 2 title',
y_label: 'Panel 2 y_label',
metrics: [{
query_range: 'Panel 2 metric 1 query_range',
unit: 'Panel 2 metric 1 unit',
label: 'Panel 2 metric 1 label',
id: 'Panel 2 metric 1 id'
}]
}
]
}]
}
end

let(:expected_metrics) do
[
{
title: 'Panel 1 title',
y_label: 'Panel 1 y_label',
query: "Panel 1 metric 1 query_range",
unit: 'Panel 1 metric 1 unit',
legend: 'Panel 1 metric 1 label',
identifier: 'Panel 1 metric 1 id',
group: 3,
common: false
},
{
title: 'Panel 1 title',
y_label: 'Panel 1 y_label',
query: 'Panel 1 metric 2 query',
unit: 'Panel 1 metric 2 unit',
legend: 'Panel 1 metric 2 label',
identifier: 'Panel 1 metric 2 id',
group: 3,
common: false
},
{
title: 'Panel 2 title',
y_label: 'Panel 2 y_label',
query: 'Panel 2 metric 1 query_range',
unit: 'Panel 2 metric 1 unit',
legend: 'Panel 2 metric 1 label',
identifier: 'Panel 2 metric 1 id',
group: 3,
common: false
}
]
end

it 'returns collection of metrics with correct attributes' do
expect(subject.execute).to match_array(expected_metrics)
end
end

context 'invalid dashboard' do
let(:dashboard_hash) { {} }

it 'raises missing attribute error' do
expect { subject.execute }.to raise_error(
::Gitlab::Metrics::Dashboard::Transformers::Errors::MissingAttribute, "Missing attribute: 'panel_groups'"
)
end
end
end
end
@@ -1,29 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Metrics::Dashboard::Validator::Client do
include MetricsDashboardHelpers

let_it_be(:schema_path) { 'lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json' }

subject { described_class.new(dashboard, schema_path) }

describe '#execute' do
context 'with no validation errors' do
let(:dashboard) { load_sample_dashboard }

it 'returns empty array' do
expect(subject.execute).to eq([])
end
end

context 'with validation errors' do
let(:dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) }

it 'returns array of error objects' do
expect(subject.execute).to include(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError)
end
end
end
end
@@ -1,15 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Metrics::Dashboard::Validator::CustomFormats do
describe '#format_handlers' do
describe 'add_to_metric_id_cache' do
it 'adds data to metric id cache' do
subject.format_handlers['add_to_metric_id_cache'].call('metric_id', '_schema')

expect(subject.metric_ids_cache).to eq(["metric_id"])
end
end
end
end
@ -1,149 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Metrics::Dashboard::Validator::Errors do
|
||||
describe Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError do
|
||||
context 'empty error hash' do
|
||||
let(:error_hash) { {} }
|
||||
|
||||
it 'uses default error message' do
|
||||
expect(described_class.new(error_hash).message).to eq('Dashboard failed schema validation')
|
||||
end
|
||||
end
|
||||
|
||||
    context 'formatted message' do
      subject { described_class.new(error_hash).message }

      let(:error_hash) do
        {
          'data' => 'property_name',
          'data_pointer' => pointer,
          'type' => type,
          'schema' => 'schema',
          'details' => details
        }
      end

      context 'for root object' do
        let(:pointer) { '' }

        context 'when required keys are missing' do
          let(:type) { 'required' }
          let(:details) { { 'missing_keys' => ['one'] } }

          it { is_expected.to eq 'root is missing required keys: one' }
        end

        context 'when there is type mismatch' do
          %w(null string boolean integer number array object).each do |expected_type|
            context "on type: #{expected_type}" do
              let(:type) { expected_type }
              let(:details) { nil }

              it { is_expected.to eq "'property_name' at root is not of type: #{expected_type}" }
            end
          end
        end
      end

      context 'for nested object' do
        let(:pointer) { '/nested_objects/0' }

        context 'when required keys are missing' do
          let(:type) { 'required' }
          let(:details) { { 'missing_keys' => ['two'] } }

          it { is_expected.to eq '/nested_objects/0 is missing required keys: two' }
        end

        context 'when there is type mismatch' do
          %w(null string boolean integer number array object).each do |expected_type|
            context "on type: #{expected_type}" do
              let(:type) { expected_type }
              let(:details) { nil }

              it { is_expected.to eq "'property_name' at /nested_objects/0 is not of type: #{expected_type}" }
            end
          end
        end

        context 'when data does not match pattern' do
          let(:type) { 'pattern' }
          let(:error_hash) do
            {
              'data' => 'property_name',
              'data_pointer' => pointer,
              'type' => type,
              'schema' => { 'pattern' => 'aa.*' }
            }
          end

          it { is_expected.to eq "'property_name' at /nested_objects/0 does not match pattern: aa.*" }
        end

        context 'when data does not match format' do
          let(:type) { 'format' }
          let(:error_hash) do
            {
              'data' => 'property_name',
              'data_pointer' => pointer,
              'type' => type,
              'schema' => { 'format' => 'date-time' }
            }
          end

          it { is_expected.to eq "'property_name' at /nested_objects/0 does not match format: date-time" }
        end

        context 'when data is not const' do
          let(:type) { 'const' }
          let(:error_hash) do
            {
              'data' => 'property_name',
              'data_pointer' => pointer,
              'type' => type,
              'schema' => { 'const' => 'one' }
            }
          end

          it { is_expected.to eq "'property_name' at /nested_objects/0 is not: \"one\"" }
        end

        context 'when data is not included in enum' do
          let(:type) { 'enum' }
          let(:error_hash) do
            {
              'data' => 'property_name',
              'data_pointer' => pointer,
              'type' => type,
              'schema' => { 'enum' => %w(one two) }
            }
          end

          it { is_expected.to eq "'property_name' at /nested_objects/0 is not one of: [\"one\", \"two\"]" }
        end

        context 'when error type is unknown' do
          let(:type) { 'unknown' }
          let(:error_hash) do
            {
              'data' => 'property_name',
              'data_pointer' => pointer,
              'type' => type,
              'schema' => 'schema'
            }
          end

          it { is_expected.to eq "'property_name' at /nested_objects/0 is invalid: error_type=unknown" }
        end
      end
    end
  end

  describe Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds do
    it 'has custom error message' do
      expect(described_class.new.message).to eq('metric_id must be unique across a project')
    end
  end
end
@@ -1,78 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Metrics::Dashboard::Validator::PostSchemaValidator do
  describe '#validate' do
    context 'with no project and dashboard_path provided' do
      context 'unique local metric_ids' do
        it 'returns empty array' do
          expect(described_class.new(metric_ids: [1, 2, 3]).validate).to eq([])
        end
      end

      context 'duplicate local metric_ids' do
        it 'returns error' do
          expect(described_class.new(metric_ids: [1, 1]).validate)
            .to eq([Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds])
        end
      end
    end

    context 'with project and dashboard_path' do
      let(:project) { create(:project) }

      subject do
        described_class.new(
          project: project,
          metric_ids: ['some_identifier'],
          dashboard_path: 'test/path.yml'
        ).validate
      end

      context 'with unique metric identifiers' do
        before do
          create(:prometheus_metric,
            project: project,
            identifier: 'some_other_identifier',
            dashboard_path: 'test/path.yml'
          )
        end

        it 'returns empty array' do
          expect(subject).to eq([])
        end
      end

      context 'duplicate metric identifiers in database' do
        context 'with different dashboard_path' do
          before do
            create(:prometheus_metric,
              project: project,
              identifier: 'some_identifier',
              dashboard_path: 'some/other/path.yml'
            )
          end

          it 'returns error' do
            expect(subject).to include(Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds)
          end
        end

        context 'with same dashboard_path' do
          before do
            create(:prometheus_metric,
              project: project,
              identifier: 'some_identifier',
              dashboard_path: 'test/path.yml'
            )
          end

          it 'returns empty array' do
            expect(subject).to eq([])
          end
        end
      end
    end
  end
end
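A minimal usage sketch of the validator exercised above, inferred from the deleted spec's expectations (illustration only, not part of the diff; `metric_ids` stands for the identifiers collected from a dashboard — the spec itself passes plain arrays):

# Duplicate ids yield the DuplicateMetricIds error class.
validator = Gitlab::Metrics::Dashboard::Validator::PostSchemaValidator.new(metric_ids: %w[cpu cpu])
errors = validator.validate
# => [Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds]
errors.each { |error_class| puts error_class.new.message }
# => "metric_id must be unique across a project"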
@@ -1,146 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Metrics::Dashboard::Validator do
  include MetricsDashboardHelpers

  let_it_be(:valid_dashboard) { load_sample_dashboard }
  let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) }
  let_it_be(:duplicate_id_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml')) }

  let_it_be(:project) { create(:project) }

  describe '#validate' do
    context 'valid dashboard schema' do
      it 'returns true' do
        expect(described_class.validate(valid_dashboard)).to be true
      end

      context 'with duplicate metric_ids' do
        it 'returns false' do
          expect(described_class.validate(duplicate_id_dashboard)).to be false
        end
      end

      context 'with dashboard_path and project' do
        subject { described_class.validate(valid_dashboard, dashboard_path: 'test/path.yml', project: project) }

        context 'with no conflicting metric identifiers in db' do
          it { is_expected.to be true }
        end

        context 'with metric identifier present in current dashboard' do
          before do
            create(:prometheus_metric,
              identifier: 'metric_a1',
              dashboard_path: 'test/path.yml',
              project: project
            )
          end

          it { is_expected.to be true }
        end

        context 'with metric identifier present in another dashboard' do
          before do
            create(:prometheus_metric,
              identifier: 'metric_a1',
              dashboard_path: 'some/other/dashboard/path.yml',
              project: project
            )
          end

          it { is_expected.to be false }
        end
      end
    end

    context 'invalid dashboard schema' do
      it 'returns false' do
        expect(described_class.validate(invalid_dashboard)).to be false
      end
    end
  end

  describe '#validate!' do
    shared_examples 'validation failed' do |errors_message|
      it 'raises error with corresponding messages', :aggregate_failures do
        expect { subject }.to raise_error do |error|
          expect(error).to be_kind_of(Gitlab::Metrics::Dashboard::Validator::Errors::InvalidDashboardError)
          expect(error.message).to eq(errors_message)
        end
      end
    end

    context 'valid dashboard schema' do
      it 'returns true' do
        expect(described_class.validate!(valid_dashboard)).to be true
      end

      context 'with duplicate metric_ids' do
        subject { described_class.validate!(duplicate_id_dashboard) }

        it_behaves_like 'validation failed', 'metric_id must be unique across a project'
      end

      context 'with dashboard_path and project' do
        subject { described_class.validate!(valid_dashboard, dashboard_path: 'test/path.yml', project: project) }

        context 'with no conflicting metric identifiers in db' do
          it { is_expected.to be true }
        end

        context 'with metric identifier present in current dashboard' do
          before do
            create(:prometheus_metric,
              identifier: 'metric_a1',
              dashboard_path: 'test/path.yml',
              project: project
            )
          end

          it { is_expected.to be true }
        end

        context 'with metric identifier present in another dashboard' do
          before do
            create(:prometheus_metric,
              identifier: 'metric_a1',
              dashboard_path: 'some/other/dashboard/path.yml',
              project: project
            )
          end

          it_behaves_like 'validation failed', 'metric_id must be unique across a project'
        end
      end
    end

    context 'invalid dashboard schema' do
      subject { described_class.validate!(invalid_dashboard) }

      context 'wrong property type' do
        it_behaves_like 'validation failed', "'this_should_be_a_int' at /panel_groups/0/panels/0/weight is not of type: number"
      end

      context 'panel groups missing' do
        let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml')) }

        it_behaves_like 'validation failed', 'root is missing required keys: panel_groups'
      end

      context 'groups are missing panels and group keys' do
        let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_groups_missing_panels_and_group.yml')) }

        it_behaves_like 'validation failed', '/panel_groups/0 is missing required keys: group'
      end

      context 'panel is missing metrics key' do
        let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_panel_is_missing_metrics.yml')) }

        it_behaves_like 'validation failed', '/panel_groups/0/panels/0 is missing required keys: metrics'
      end
    end
  end
end
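A short sketch of the two entry points covered by the deleted spec above (illustration only, not part of the diff; `dashboard` stands for a parsed dashboard hash):

# validate returns a boolean; validate! raises on failure.
Gitlab::Metrics::Dashboard::Validator.validate(dashboard) # => true or false

begin
  Gitlab::Metrics::Dashboard::Validator.validate!(dashboard, dashboard_path: 'test/path.yml', project: project)
rescue Gitlab::Metrics::Dashboard::Validator::Errors::InvalidDashboardError => e
  e.message # e.g. "root is missing required keys: panel_groups"
end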
@@ -59,6 +59,19 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
      described_class.initialize_process_metrics
    end

    context 'when emit_sidekiq_histogram FF is disabled' do
      before do
        stub_feature_flags(emit_sidekiq_histogram_metrics: false)
        allow(Gitlab::SidekiqConfig).to receive(:current_worker_queue_mappings).and_return('MergeWorker' => 'merge')
      end

      it 'does not initialize sidekiq_jobs_completion_seconds' do
        expect(completion_seconds_metric).not_to receive(:get)

        described_class.initialize_process_metrics
      end
    end

    shared_examples "not initializing sidekiq SLIs" do
      it 'does not initialize sidekiq SLIs' do
        expect(Gitlab::Metrics::SidekiqSlis)
@@ -441,5 +454,53 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
        end
      end
    end

    context 'when emit_sidekiq_histogram_metrics FF is disabled' do
      include_context 'server metrics with mocked prometheus'
      include_context 'server metrics call' do
        let(:stub_subject) { false }
      end

      subject(:middleware) { described_class.new }

      let(:job) { {} }
      let(:queue) { :test }
      let(:worker_class) do
        Class.new do
          def self.name
            "TestWorker"
          end
          include ApplicationWorker
        end
      end

      let(:worker) { worker_class.new }
      let(:labels) do
        { queue: queue.to_s,
          worker: worker.class.name,
          boundary: "",
          external_dependencies: "no",
          feature_category: "",
          urgency: "low" }
      end

      before do
        stub_feature_flags(emit_sidekiq_histogram_metrics: false)
      end

      it 'does not emit histogram metrics' do
        expect(completion_seconds_metric).not_to receive(:observe)
        expect(queue_duration_seconds).not_to receive(:observe)
        expect(failed_total_metric).not_to receive(:increment)

        middleware.call(worker, job, queue) { nil }
      end

      it 'emits sidekiq_jobs_completion_seconds_sum metric' do
        expect(completion_seconds_sum_metric).to receive(:increment).with(labels, monotonic_time_duration)

        middleware.call(worker, job, queue) { nil }
      end
    end
  end
end
# rubocop: enable RSpec/MultipleMemoizedHelpers
@@ -3,41 +3,31 @@
require 'spec_helper'

RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_redis_shared_state do
  let(:user1) { build(:user, id: 1) }
  let(:user) { build(:user, id: 1) }
  let(:user2) { build(:user, id: 2) }
  let(:user3) { build(:user, id: 3) }
  let(:project) { build(:project) }
  let(:namespace) { project.namespace }
  let(:time) { Time.zone.now }

  shared_examples 'tracks and counts action' do
    subject { track_action(author: user, project: project) }

    before do
      stub_application_setting(usage_ping_enabled: true)
    end

    specify do
      aggregate_failures do
        expect(track_action(author: user1, project: project)).to be_truthy
        expect(track_action(author: user, project: project)).to be_truthy
        expect(track_action(author: user2, project: project)).to be_truthy
        expect(track_action(author: user3, time: time.end_of_week - 3.days, project: project)).to be_truthy
        expect(track_action(author: user3, project: project)).to be_truthy

        expect(count_unique(date_from: time.beginning_of_week, date_to: 1.week.from_now)).to eq(3)
      end
    end

    it 'track snowplow event' do
      track_action(author: user1, project: project)

      expect_snowplow_event(
        category: described_class.name,
        action: 'ide_edit',
        label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit',
        namespace: project.namespace,
        property: event_name,
        project: project,
        user: user1,
        context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_h]
      )
    end
    it_behaves_like 'internal event tracking'

    it 'does not track edit actions if author is not present' do
      expect(track_action(author: nil, project: project)).to be_nil
@@ -45,7 +35,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
  end

  context 'for web IDE edit actions' do
    let(:event_name) { described_class::EDIT_BY_WEB_IDE }
    let(:action) { described_class::EDIT_BY_WEB_IDE }

    it_behaves_like 'tracks and counts action' do
      def track_action(params)
@@ -59,7 +49,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
  end

  context 'for SFE edit actions' do
    let(:event_name) { described_class::EDIT_BY_SFE }
    let(:action) { described_class::EDIT_BY_SFE }

    it_behaves_like 'tracks and counts action' do
      def track_action(params)
@@ -73,7 +63,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
  end

  context 'for snippet editor edit actions' do
    let(:event_name) { described_class::EDIT_BY_SNIPPET_EDITOR }
    let(:action) { described_class::EDIT_BY_SNIPPET_EDITOR }

    it_behaves_like 'tracks and counts action' do
      def track_action(params)
@@ -11,7 +11,6 @@ RSpec.describe Ci::JobAnnotation, feature_category: :build_artifacts do
    it { is_expected.to belong_to(:job).class_name('Ci::Build').inverse_of(:job_annotations) }
    it { is_expected.to validate_presence_of(:name) }
    it { is_expected.to validate_length_of(:name).is_at_most(255) }
    it { is_expected.to validate_uniqueness_of(:name).scoped_to([:job_id, :partition_id]) }
  end

  describe '.create' do
@@ -78,7 +78,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
        job_artifacts_requirements job_artifacts_coverage_fuzzing
        job_artifacts_requirements_v2
        job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx
        job_annotations].freeze
        job_annotations job_artifacts_annotations].freeze
    end

    let(:ignore_accessors) do
@@ -248,6 +248,7 @@ RSpec.describe PlanLimits do
        ci_max_artifact_size_requirements_v2
        ci_max_artifact_size_coverage_fuzzing
        ci_max_artifact_size_api_fuzzing
        ci_max_artifact_size_annotations
      ]
    end
@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe PoolRepository do
RSpec.describe PoolRepository, feature_category: :source_code_management do
  describe 'associations' do
    it { is_expected.to belong_to(:shard) }
    it { is_expected.to belong_to(:source_project) }
@@ -16,12 +16,43 @@ RSpec.describe PoolRepository do
    it { is_expected.to validate_presence_of(:source_project) }
  end

  describe 'scopes' do
    let_it_be(:project1) { create(:project) }
    let_it_be(:project2) { create(:project) }
    let_it_be(:new_shard) { create(:shard, name: 'new') }
    let_it_be(:pool_repository1) { create(:pool_repository, source_project: project1) }
    let_it_be(:pool_repository2) { create(:pool_repository, source_project: project1, shard: new_shard) }
    let_it_be(:another_pool_repository) { create(:pool_repository, source_project: project2) }

    describe '.by_source_project' do
      subject { described_class.by_source_project(project1) }

      it 'returns pool repositories per source project from all shards' do
        is_expected.to match_array([pool_repository1, pool_repository2])
      end
    end

    describe '.by_source_project_and_shard_name' do
      subject { described_class.by_source_project_and_shard_name(project1, new_shard.name) }

      it 'returns only a requested pool repository' do
        is_expected.to match_array([pool_repository2])
      end
    end
  end

  describe '#disk_path' do
    it 'sets the hashed disk_path' do
      pool = create(:pool_repository)

      expect(pool.disk_path).to match(%r{\A@pools/\h{2}/\h{2}/\h{64}})
    end

    it 'keeps disk_path if already provided' do
      pool = create(:pool_repository, disk_path: '@pools/aa/bbbb')

      expect(pool.disk_path).to eq('@pools/aa/bbbb')
    end
  end

  describe '#unlink_repository' do
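A quick sketch of the two scopes added above, mirroring the spec's setup (illustration only, not part of the diff; `project` and the 'new' shard are assumed to exist):

PoolRepository.by_source_project(project)                        # every pool for the project, across all shards
PoolRepository.by_source_project_and_shard_name(project, 'new')  # only the pool on the named shard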
@@ -3045,6 +3045,34 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
        shard_name: 'foo'
      )
    end

    it 'refreshes a memoized repository value' do
      previous_repository = project.repository

      allow(project).to receive(:disk_path).and_return('fancy/new/path')
      allow(project).to receive(:repository_storage).and_return('foo')

      project.track_project_repository

      expect(project.repository).not_to eq(previous_repository)
    end

    context 'when "replicate_object_pool_on_move" FF is disabled' do
      before do
        stub_feature_flags(replicate_object_pool_on_move: false)
      end

      it 'does not update a memoized repository value' do
        previous_repository = project.repository

        allow(project).to receive(:disk_path).and_return('fancy/new/path')
        allow(project).to receive(:repository_storage).and_return('foo')

        project.track_project_repository

        expect(project.repository).to eq(previous_repository)
      end
    end
  end
end
@@ -6951,6 +6979,73 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
    end
  end

  describe '#swap_pool_repository!' do
    subject(:swap_pool_repository!) { project.swap_pool_repository! }

    let_it_be_with_reload(:project) { create(:project, :empty_repo) }
    let_it_be(:shard_to) { create(:shard, name: 'test_second_storage') }

    let!(:pool1) { create(:pool_repository, source_project: project) }
    let!(:pool2) { create(:pool_repository, shard: shard_to, source_project: project) }
    let(:project_pool) { pool1 }
    let(:repository_storage) { shard_to.name }

    before do
      stub_storage_settings(
        'test_second_storage' => {
          'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
          'path' => TestEnv::SECOND_STORAGE_PATH
        }
      )

      project.update!(pool_repository: project_pool, repository_storage: repository_storage)
    end

    shared_examples 'no pool repository swap' do
      it 'does not change pool repository for the project' do
        expect { swap_pool_repository! }.not_to change { project.reload.pool_repository }
      end
    end

    it 'moves project to the new pool repository' do
      expect { swap_pool_repository! }.to change { project.reload.pool_repository }.from(pool1).to(pool2)
    end

    context 'when feature flag replicate_object_pool_on_move is disabled' do
      before do
        stub_feature_flags(replicate_object_pool_on_move: false)
      end

      it_behaves_like 'no pool repository swap'
    end

    context 'when repository does not exist' do
      let(:project) { build(:project) }

      it_behaves_like 'no pool repository swap'
    end

    context 'when project does not have a pool repository' do
      let(:project_pool) { nil }

      it_behaves_like 'no pool repository swap'
    end

    context 'when project pool is on the same shard as repository' do
      let(:project_pool) { pool2 }

      it_behaves_like 'no pool repository swap'
    end

    context 'when pool repository for shard is missing' do
      let(:pool2) { nil }

      it 'raises record not found error' do
        expect { swap_pool_repository! }.to raise_error(ActiveRecord::RecordNotFound)
      end
    end
  end

  describe '#leave_pool_repository' do
    let(:pool) { create(:pool_repository) }
    let(:project) { create(:project, :repository, pool_repository: pool) }
@@ -6978,6 +7073,53 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
    end
  end

  describe '#link_pool_repository' do
    let(:pool) { create(:pool_repository) }
    let(:project) { build(:project, :empty_repo, pool_repository: pool) }

    subject { project.link_pool_repository }

    it 'links pool repository to project repository' do
      expect(pool).to receive(:link_repository).with(project.repository)

      subject
    end

    context 'when pool repository is missing' do
      let(:pool) { nil }

      it 'does not link anything' do
        allow_next_instance_of(PoolRepository) do |pool_repository|
          expect(pool_repository).not_to receive(:link_repository)
        end

        subject
      end
    end

    context 'when pool repository is on a different shard than the project repository' do
      let(:pool) { create(:pool_repository, shard: create(:shard, name: 'new')) }

      it 'does not link anything' do
        expect(pool).not_to receive(:link_repository)

        subject
      end

      context 'when feature flag replicate_object_pool_on_move is disabled' do
        before do
          stub_feature_flags(replicate_object_pool_on_move: false)
        end

        it 'links pool repository to project repository' do
          expect(pool).to receive(:link_repository).with(project.repository)

          subject
        end
      end
    end
  end

  describe '#check_personal_projects_limit' do
    context 'when creating a project for a group' do
      it 'does nothing' do
@@ -573,13 +573,9 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
        subject
      end

      it_behaves_like 'Snowplow event tracking with RedisHLL context' do
      it_behaves_like 'internal event tracking' do
        let(:action) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_WEB_IDE }
        let(:namespace) { project.namespace.reload }
        let(:category) { 'Gitlab::UsageDataCounters::EditorUniqueCounter' }
        let(:action) { 'ide_edit' }
        let(:property) { 'g_edit_by_web_ide' }
        let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit' }
        let(:context) { [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context] }
      end

      context 'counts.web_ide_commits Snowplow event tracking' do
@@ -188,16 +188,10 @@ RSpec.describe 'Updating a Snippet', feature_category: :source_code_management d
        stub_session('warden.user.user.key' => [[current_user.id], current_user.authenticatable_salt])
      end

      it_behaves_like 'Snowplow event tracking with RedisHLL context' do
      it_behaves_like 'internal event tracking' do
        let(:action) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_SNIPPET_EDITOR }
        let(:user) { current_user }
        let(:property) { 'g_edit_by_snippet_ide' }
        let(:namespace) { project.namespace }
        let(:category) { 'Gitlab::UsageDataCounters::EditorUniqueCounter' }
        let(:action) { 'ide_edit' }
        let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit' }
        let(:context) do
          [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context]
        end
      end
    end
  end
@@ -321,6 +321,45 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state
      end
    end

    shared_examples_for 'handling annotations' do |storage_type|
      context 'when artifact type is annotations' do
        let(:params) do
          {
            'artifact_type' => 'annotations',
            'artifact_format' => 'gzip'
          }.with_indifferent_access
        end

        if storage_type == :object_storage
          let(:object_body) { File.read('spec/fixtures/gl-annotations.json.gz') }
          let(:upload_filename) { 'gl-annotations.json.gz' }

          before do
            stub_request(:get, %r{s3.amazonaws.com/#{remote_path}})
              .to_return(status: 200, body: File.read('spec/fixtures/gl-annotations.json.gz'))
          end
        else
          let(:artifacts_file) do
            file_to_upload('spec/fixtures/gl-annotations.json.gz', sha256: artifacts_sha256)
          end
        end

        it 'calls parse service' do
          expect_next_instance_of(Ci::ParseAnnotationsArtifactService) do |service|
            expect(service).to receive(:execute).once.and_call_original
          end

          expect(execute[:status]).to eq(:success)
          expect(job.job_annotations.as_json).to contain_exactly(
            hash_including('name' => 'external_links', 'data' => [
              hash_including('external_link' => hash_including('label' => 'URL 1', 'url' => 'https://url1.example.com/')),
              hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/'))
            ])
          )
        end
      end
    end

    shared_examples_for 'handling object storage errors' do
      shared_examples 'rescues object storage error' do |klass, message, expected_message|
        it "handles #{klass}" do
@@ -495,6 +534,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state

      it_behaves_like 'handling uploads'
      it_behaves_like 'handling dotenv', :object_storage
      it_behaves_like 'handling annotations', :object_storage
      it_behaves_like 'handling object storage errors'
      it_behaves_like 'validating requirements'
    end
@@ -506,6 +546,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state

      it_behaves_like 'handling uploads'
      it_behaves_like 'handling dotenv', :local_storage
      it_behaves_like 'handling annotations', :local_storage
      it_behaves_like 'validating requirements'
    end
  end
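For reference, a sketch of the gl-annotations.json.gz payload the 'handling annotations' examples exercise, reconstructed from the expectations above (illustration only, not part of the diff; the fixture itself is gzipped JSON):

# The artifact is a JSON object mapping annotation list names to arrays of entries.
annotations = {
  external_links: [
    { external_link: { label: 'URL 1', url: 'https://url1.example.com/' } },
    { external_link: { label: 'URL 2', url: 'https://url2.example.com/' } }
  ]
}
File.binwrite('gl-annotations.json.gz', ActiveSupport::Gzip.compress(annotations.to_json))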
@@ -0,0 +1,182 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::ParseAnnotationsArtifactService, feature_category: :build_artifacts do
  let_it_be(:project) { create(:project) }

  let_it_be_with_reload(:build) { create(:ci_build, project: project) }
  let(:service) { described_class.new(project, nil) }

  describe '#execute' do
    subject { service.execute(artifact) }

    context 'when build has an annotations artifact' do
      let_it_be(:artifact) { create(:ci_job_artifact, :annotations, job: build) }

      context 'when artifact does not have the specified blob' do
        before do
          allow(artifact).to receive(:each_blob)
        end

        it 'parses nothing' do
          expect(subject[:status]).to eq(:success)

          expect(build.job_annotations).to be_empty
        end
      end

      context 'when artifact has the specified blob' do
        let(:blob) { data.to_json }

        before do
          allow(artifact).to receive(:each_blob).and_yield(blob)
        end

        context 'when valid annotations are given' do
          let(:data) do
            {
              external_links: [
                {
                  external_link: {
                    label: 'URL 1',
                    url: 'https://url1.example.com/'
                  }
                },
                {
                  external_link: {
                    label: 'URL 2',
                    url: 'https://url2.example.com/'
                  }
                }
              ]
            }
          end

          it 'parses the artifact' do
            subject

            expect(build.job_annotations.as_json).to contain_exactly(
              hash_including('name' => 'external_links', 'data' => [
                hash_including('external_link' => hash_including('label' => 'URL 1', 'url' => 'https://url1.example.com/')),
                hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/'))
              ])
            )
          end
        end

        context 'when valid annotations are given and annotation list name is the same' do
          before do
            build.job_annotations.create!(name: 'external_links', data: [
              {
                external_link: {
                  label: 'URL 1',
                  url: 'https://url1.example.com/'
                }
              }
            ])
          end

          let(:data) do
            {
              external_links: [
                {
                  external_link: {
                    label: 'URL 2',
                    url: 'https://url2.example.com/'
                  }
                }
              ]
            }
          end

          it 'parses the artifact' do
            subject

            expect(build.job_annotations.as_json).to contain_exactly(
              hash_including('name' => 'external_links', 'data' => [
                hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/'))
              ])
            )
          end
        end

        context 'when invalid JSON is given' do
          let(:blob) { 'Invalid JSON!' }

          it 'returns error' do
            expect(subject[:status]).to eq(:error)
            expect(subject[:http_status]).to eq(:bad_request)
          end
        end

        context 'when root is not an object' do
          let(:data) { [] }

          it 'returns error' do
            expect(subject[:status]).to eq(:error)
            expect(subject[:message]).to eq('Annotations files must be a JSON object')
            expect(subject[:http_status]).to eq(:bad_request)
          end
        end

        context 'when item is not a valid annotation list' do
          let(:data) { { external_links: {} } }

          it 'returns error' do
            expect(subject[:status]).to eq(:error)
            expect(subject[:message]).to eq('Validation failed: Data must be a valid json schema')
            expect(subject[:http_status]).to eq(:bad_request)
          end
        end

        context 'when more annotation lists than the limit are specified' do
          let(:data) do
            {
              external_links_1: [
                {
                  external_link: {
                    label: 'URL',
                    url: 'https://example.com/'
                  }
                }
              ],
              external_links_2: [
                {
                  external_link: {
                    label: 'URL',
                    url: 'https://example.com/'
                  }
                }
              ]
            }
          end

          before do
            allow(service).to receive(:annotations_num_limit).and_return(1)
          end

          it 'returns error' do
            expect(subject[:status]).to eq(:error)
            expect(subject[:message]).to eq(
              "Annotations files cannot have more than #{service.send(:annotations_num_limit)} annotation lists")
            expect(subject[:http_status]).to eq(:bad_request)
          end
        end
      end

      context 'when artifact size is too big' do
        before do
          allow(artifact.file).to receive(:size) { service.send(:annotations_size_limit) + 1.kilobyte }
        end

        it 'returns error' do
          expect(subject[:status]).to eq(:error)
          expect(subject[:message]).to eq(
            "Annotations Artifact Too Big. Maximum Allowable Size: #{service.send(:annotations_size_limit)}")
          expect(subject[:http_status]).to eq(:bad_request)
        end
      end
    end
  end
end
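A minimal invocation sketch based on the new spec above (illustration only, not part of the diff; `artifact` is an annotations-type Ci::JobArtifact as created by the factory):

result = Ci::ParseAnnotationsArtifactService.new(project, nil).execute(artifact)
result[:status]      # :success on parse, :error otherwise
result[:message]     # populated on :error, e.g. "Annotations files must be a JSON object"
result[:http_status] # :bad_request on :error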
@@ -380,7 +380,7 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
    end

    context 'when a project is already forked' do
      it 'creates a new poolresository after the project is moved to a new shard' do
      it 'creates a new pool repository after the project is moved to a new shard' do
        project = create(:project, :public, :repository)
        fork_before_move = fork_project(project, nil, using_service: true)

@@ -393,6 +393,9 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
        allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
        allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
          .and_return(::Gitlab::Git::BLANK_SHA)
        allow_next_instance_of(Gitlab::Git::ObjectPool) do |object_pool|
          allow(object_pool).to receive(:link)
        end

        storage_move = create(
          :project_repository_storage_move,
Some files were not shown because too many files have changed in this diff.