From 29549d052d62b949830873f76b6c77d97cbd533d Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Fri, 11 Aug 2023 12:10:27 +0000 Subject: [PATCH] Add latest changes from gitlab-org/gitlab@master --- GITALY_SERVER_VERSION | 2 +- .../components/message_form.vue | 33 ++- .../contribution_event_closed.vue | 13 +- .../contribution_event_created.vue | 14 +- .../contribution_event_reopened.vue | 36 +++ .../components/contribution_events.vue | 5 + .../contribution_events/constants.js | 32 ++- .../javascripts/contribution_events/utils.js | 9 + .../components/edit_environment.vue | 7 +- .../components/environment_form.vue | 7 +- .../components/new_environment_item.vue | 10 +- .../graphql/queries/environment.query.graphql | 1 + .../environment_cluster_agent.query.graphql | 1 + ...cluster_agent_with_namespace.query.graphql | 20 -- .../environment_with_namespace.graphql | 15 -- .../components/blob_content_viewer.vue | 2 +- .../vue_merge_request_widget/index.js | 2 +- .../source_viewer/source_viewer_new.vue | 4 + .../projects/environments_controller.rb | 4 - app/finders/issuable_finder/params.rb | 2 +- app/models/ci/job_annotation.rb | 4 +- app/models/ci/job_artifact.rb | 7 +- app/models/pool_repository.rb | 12 +- app/models/project.rb | 32 ++- .../ci/job_artifacts/create_service.rb | 5 + .../ci/parse_annotations_artifact_service.rb | 61 +++++ .../update_repository_storage_methods.rb | 13 +- .../update_repository_storage_service.rb | 70 +++++- ...nalEventTracking_g_edit_by_snippet_ide.yml | 25 ++ ...nternalEventTracking_g_edit_by_web_ide.yml | 25 ++ ...08_InternalEventTracking_g_edit_by_sfe.yml | 25 ++ .../replicate_object_pool_on_move.yml | 8 + .../emit_sidekiq_histogram_metrics.yml} | 12 +- ...210216180330_g_edit_by_web_ide_monthly.yml | 3 + .../20210216180334_g_edit_by_sfe_monthly.yml | 3 + ...16180338_g_edit_by_snippet_ide_monthly.yml | 3 + ...0210216180328_g_edit_by_web_ide_weekly.yml | 3 + .../20210216180332_g_edit_by_sfe_weekly.yml | 3 + 
...216180336_g_edit_by_snippet_ide_weekly.yml | 3 + ...1609_add_ci_job_annotations_plan_limits.rb | 9 + db/schema_migrations/20230728171609 | 1 + db/structure.sql | 5 +- doc/api/graphql/reference/index.md | 1 + doc/ci/environments/kubernetes_dashboard.md | 2 +- doc/operations/error_tracking.md | 6 +- .../index.md | 4 +- doc/user/group/manage.md | 17 +- doc/user/group/saml_sso/group_sync.md | 2 +- ...k_environment_aware_any_oversized_blobs.rb | 14 +- lib/gitlab/ci/config/entry/reports.rb | 3 +- .../transformers/yml/v1/prometheus_metrics.rb | 54 ----- lib/gitlab/metrics/dashboard/validator.rb | 30 --- .../metrics/dashboard/validator/client.rb | 56 ----- .../dashboard/validator/custom_formats.rb | 23 -- .../metrics/dashboard/validator/errors.rb | 60 ----- .../validator/post_schema_validator.rb | 52 ---- .../dashboard/validator/schemas/axis.json | 14 -- .../validator/schemas/dashboard.json | 18 -- .../dashboard/validator/schemas/link.json | 12 - .../dashboard/validator/schemas/metric.json | 16 -- .../dashboard/validator/schemas/panel.json | 24 -- .../validator/schemas/panel_group.json | 12 - .../validator/schemas/templating.json | 7 - .../sidekiq_middleware/server_metrics.rb | 39 ++- .../editor_unique_counter.rb | 28 +-- locale/gitlab.pot | 80 ++++--- package.json | 2 +- spec/factories/ci/job_artifacts.rb | 10 + spec/fixtures/gl-annotations.json.gz | Bin 0 -> 124 bytes .../components/message_form_spec.js | 130 ++++++---- .../contribution_event_reopened_spec.js | 60 +++++ .../components/contribution_events_spec.js | 4 + spec/frontend/contribution_events/utils.js | 25 +- .../contribution_events/utils_spec.js | 24 ++ .../environments/edit_environment_spec.js | 20 +- .../environments/environment_form_spec.js | 225 ++++++++---------- .../environments/new_environment_item_spec.js | 59 +---- spec/frontend/fixtures/users.rb | 2 +- .../source_viewer/source_viewer_new_spec.js | 15 +- ...ironment_aware_any_oversized_blobs_spec.rb | 25 +- .../gitlab/ci/config/entry/reports_spec.rb 
| 1 + spec/lib/gitlab/import_export/all_models.yml | 1 + .../yml/v1/prometheus_metrics_spec.rb | 99 -------- .../dashboard/validator/client_spec.rb | 29 --- .../validator/custom_formats_spec.rb | 15 -- .../dashboard/validator/errors_spec.rb | 149 ------------ .../validator/post_schema_validator_spec.rb | 78 ------ .../metrics/dashboard/validator_spec.rb | 146 ------------ .../sidekiq_middleware/server_metrics_spec.rb | 61 +++++ .../editor_unique_counter_spec.rb | 30 +-- spec/models/ci/job_annotation_spec.rb | 1 - spec/models/ci/processable_spec.rb | 2 +- spec/models/plan_limits_spec.rb | 1 + spec/models/pool_repository_spec.rb | 33 ++- spec/models/project_spec.rb | 142 +++++++++++ spec/requests/api/commits_spec.rb | 8 +- .../graphql/mutations/snippets/update_spec.rb | 10 +- .../ci/job_artifacts/create_service_spec.rb | 41 ++++ ...parse_annotations_artifact_service_spec.rb | 182 ++++++++++++++ spec/services/projects/fork_service_spec.rb | 5 +- .../update_repository_storage_service_spec.rb | 171 +++++++++++-- .../server_metrics_shared_context.rb | 10 +- ...user_contribution_events_shared_context.rb | 1 + .../finders/issues_finder_shared_examples.rb | 8 + yarn.lock | 40 ++-- 105 files changed, 1590 insertions(+), 1400 deletions(-) create mode 100644 app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_reopened.vue create mode 100644 app/assets/javascripts/contribution_events/utils.js delete mode 100644 app/assets/javascripts/environments/graphql/queries/environment_cluster_agent_with_namespace.query.graphql delete mode 100644 app/assets/javascripts/environments/graphql/queries/environment_with_namespace.graphql create mode 100644 app/services/ci/parse_annotations_artifact_service.rb create mode 100644 config/events/20230807084242_InternalEventTracking_g_edit_by_snippet_ide.yml create mode 100644 config/events/20230807084603_InternalEventTracking_g_edit_by_web_ide.yml create mode 100644 
config/events/20230808041008_InternalEventTracking_g_edit_by_sfe.yml create mode 100644 config/feature_flags/development/replicate_object_pool_on_move.yml rename config/feature_flags/{development/kubernetes_namespace_for_environment.yml => ops/emit_sidekiq_histogram_metrics.yml} (54%) create mode 100644 db/migrate/20230728171609_add_ci_job_annotations_plan_limits.rb create mode 100644 db/schema_migrations/20230728171609 delete mode 100644 lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics.rb delete mode 100644 lib/gitlab/metrics/dashboard/validator.rb delete mode 100644 lib/gitlab/metrics/dashboard/validator/client.rb delete mode 100644 lib/gitlab/metrics/dashboard/validator/custom_formats.rb delete mode 100644 lib/gitlab/metrics/dashboard/validator/errors.rb delete mode 100644 lib/gitlab/metrics/dashboard/validator/post_schema_validator.rb delete mode 100644 lib/gitlab/metrics/dashboard/validator/schemas/axis.json delete mode 100644 lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json delete mode 100644 lib/gitlab/metrics/dashboard/validator/schemas/link.json delete mode 100644 lib/gitlab/metrics/dashboard/validator/schemas/metric.json delete mode 100644 lib/gitlab/metrics/dashboard/validator/schemas/panel.json delete mode 100644 lib/gitlab/metrics/dashboard/validator/schemas/panel_group.json delete mode 100644 lib/gitlab/metrics/dashboard/validator/schemas/templating.json create mode 100644 spec/fixtures/gl-annotations.json.gz create mode 100644 spec/frontend/contribution_events/components/contribution_event/contribution_event_reopened_spec.js create mode 100644 spec/frontend/contribution_events/utils_spec.js delete mode 100644 spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb delete mode 100644 spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb delete mode 100644 spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb delete mode 100644 
spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb delete mode 100644 spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb delete mode 100644 spec/lib/gitlab/metrics/dashboard/validator_spec.rb create mode 100644 spec/services/ci/parse_annotations_artifact_service_spec.rb diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION index b4403e3c68f..ec8377b4ed5 100644 --- a/GITALY_SERVER_VERSION +++ b/GITALY_SERVER_VERSION @@ -1 +1 @@ -0e78015ff2052203845e049be8b3395bac782554 +bb342a5916dd5ecffb8b281de8290066176c2662 diff --git a/app/assets/javascripts/admin/broadcast_messages/components/message_form.vue b/app/assets/javascripts/admin/broadcast_messages/components/message_form.vue index b06b6e1437a..109df943c42 100644 --- a/app/assets/javascripts/admin/broadcast_messages/components/message_form.vue +++ b/app/assets/javascripts/admin/broadcast_messages/components/message_form.vue @@ -69,8 +69,13 @@ export default { dismissableDescription: s__('BroadcastMessages|Allow users to dismiss the broadcast message'), target: s__('BroadcastMessages|Target broadcast message'), targetRoles: s__('BroadcastMessages|Target roles'), + targetRolesRequired: s__('BroadcastMessages|Select at least one role.'), + targetRolesValidationMsg: s__('BroadcastMessages|One or more roles is required.'), targetPath: s__('BroadcastMessages|Target Path'), - targetPathDescription: s__('BroadcastMessages|Paths can contain wildcards, like */welcome'), + targetPathDescription: s__('BroadcastMessages|Paths can contain wildcards, like */welcome.'), + targetPathWithRolesReminder: s__( + 'BroadcastMessages|Leave blank to target all group and project pages.', + ), startsAt: s__('BroadcastMessages|Starts at'), endsAt: s__('BroadcastMessages|Ends at'), add: s__('BroadcastMessages|Add broadcast message'), @@ -110,6 +115,7 @@ export default { endsAt: new Date(this.broadcastMessage.endsAt.getTime()), renderedMessage: '', showInCli: this.broadcastMessage.showInCli, + isValidated: 
false, }; }, computed: { @@ -138,6 +144,18 @@ export default { this.targetSelected === TARGET_ROLES || this.targetSelected === TARGET_ALL_MATCHING_PATH ); }, + targetPathDescription() { + const defaultDescription = this.$options.i18n.targetPathDescription; + + if (this.showTargetRoles) { + return `${defaultDescription} ${this.$options.i18n.targetPathWithRolesReminder}`; + } + + return defaultDescription; + }, + targetRolesValid() { + return !this.showTargetRoles || this.targetAccessLevels.length > 0; + }, formPayload() { return JSON.stringify({ message: this.message, @@ -177,6 +195,12 @@ export default { }, async onSubmit() { this.loading = true; + this.isValidated = true; + + if (!this.targetRolesValid) { + this.loading = false; + return; + } const success = await this.submitForm(); if (success) { @@ -294,6 +318,9 @@ export default { @@ -307,7 +334,7 @@ export default { > - {{ $options.i18n.targetPathDescription }} + {{ targetPathDescription }} @@ -326,7 +353,7 @@ export default { :loading="loading" :disabled="messageBlank" data-testid="submit-button" - class="gl-mr-2" + class="js-no-auto-disable gl-mr-2" > {{ isAddForm ? 
$options.i18n.add : $options.i18n.update }} diff --git a/app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_closed.vue b/app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_closed.vue index 1b209e97ed5..85c42ca5485 100644 --- a/app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_closed.vue +++ b/app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_closed.vue @@ -4,6 +4,7 @@ import { TARGET_TYPE_MERGE_REQUEST, EVENT_CLOSED_ICONS, } from 'ee_else_ce/contribution_events/constants'; +import { getValueByEventTarget } from '../../utils'; import ContributionEventBase from './contribution_event_base.vue'; export default { @@ -16,20 +17,14 @@ export default { }, }, computed: { - target() { - return this.event.target; - }, targetType() { - return this.target.type; - }, - issueType() { - return this.target.issue_type; + return this.event.target.type; }, message() { - return EVENT_CLOSED_I18N[this.issueType || this.targetType] || EVENT_CLOSED_I18N.fallback; + return getValueByEventTarget(EVENT_CLOSED_I18N, this.event); }, iconName() { - return EVENT_CLOSED_ICONS[this.issueType || this.targetType] || EVENT_CLOSED_ICONS.fallback; + return getValueByEventTarget(EVENT_CLOSED_ICONS, this.event); }, iconClass() { return this.targetType === TARGET_TYPE_MERGE_REQUEST ? 
'gl-text-red-500' : 'gl-text-blue-500'; diff --git a/app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_created.vue b/app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_created.vue index 6c861e18a2f..7915cd6679d 100644 --- a/app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_created.vue +++ b/app/assets/javascripts/contribution_events/components/contribution_event/contribution_event_created.vue @@ -1,5 +1,10 @@ + + diff --git a/app/assets/javascripts/contribution_events/components/contribution_events.vue b/app/assets/javascripts/contribution_events/components/contribution_events.vue index d93fe96abfd..f161afa6048 100644 --- a/app/assets/javascripts/contribution_events/components/contribution_events.vue +++ b/app/assets/javascripts/contribution_events/components/contribution_events.vue @@ -10,6 +10,7 @@ import { EVENT_TYPE_MERGED, EVENT_TYPE_CREATED, EVENT_TYPE_CLOSED, + EVENT_TYPE_REOPENED, } from '../constants'; import ContributionEventApproved from './contribution_event/contribution_event_approved.vue'; import ContributionEventExpired from './contribution_event/contribution_event_expired.vue'; @@ -20,6 +21,7 @@ import ContributionEventPrivate from './contribution_event/contribution_event_pr import ContributionEventMerged from './contribution_event/contribution_event_merged.vue'; import ContributionEventCreated from './contribution_event/contribution_event_created.vue'; import ContributionEventClosed from './contribution_event/contribution_event_closed.vue'; +import ContributionEventReopened from './contribution_event/contribution_event_reopened.vue'; export default { props: { @@ -141,6 +143,9 @@ export default { case EVENT_TYPE_CLOSED: return ContributionEventClosed; + case EVENT_TYPE_REOPENED: + return ContributionEventReopened; + default: return EmptyComponent; } diff --git a/app/assets/javascripts/contribution_events/constants.js 
b/app/assets/javascripts/contribution_events/constants.js index ab242d2b1a9..6d1c5aa1b4c 100644 --- a/app/assets/javascripts/contribution_events/constants.js +++ b/app/assets/javascripts/contribution_events/constants.js @@ -34,6 +34,8 @@ export const WORK_ITEM_ISSUE_TYPE_ISSUE = 'issue'; export const WORK_ITEM_ISSUE_TYPE_TASK = 'task'; export const WORK_ITEM_ISSUE_TYPE_INCIDENT = 'incident'; +export const TYPE_FALLBACK = 'fallback'; + export const EVENT_CREATED_I18N = { [RESOURCE_PARENT_TYPE_PROJECT]: s__('ContributionEvent|Created project %{resourceParentLink}.'), [TARGET_TYPE_MILESTONE]: s__( @@ -57,7 +59,7 @@ export const EVENT_CREATED_I18N = { [WORK_ITEM_ISSUE_TYPE_INCIDENT]: s__( 'ContributionEvent|Opened incident %{targetLink} in %{resourceParentLink}.', ), - fallback: s__('ContributionEvent|Created resource.'), + [TYPE_FALLBACK]: s__('ContributionEvent|Created resource.'), }; export const EVENT_CLOSED_I18N = { @@ -76,11 +78,35 @@ export const EVENT_CLOSED_I18N = { [WORK_ITEM_ISSUE_TYPE_INCIDENT]: s__( 'ContributionEvent|Closed incident %{targetLink} in %{resourceParentLink}.', ), - fallback: s__('ContributionEvent|Closed resource.'), + [TYPE_FALLBACK]: s__('ContributionEvent|Closed resource.'), +}; + +export const EVENT_REOPENED_I18N = { + [TARGET_TYPE_MILESTONE]: s__( + 'ContributionEvent|Reopened milestone %{targetLink} in %{resourceParentLink}.', + ), + [TARGET_TYPE_MERGE_REQUEST]: s__( + 'ContributionEvent|Reopened merge request %{targetLink} in %{resourceParentLink}.', + ), + [WORK_ITEM_ISSUE_TYPE_ISSUE]: s__( + 'ContributionEvent|Reopened issue %{targetLink} in %{resourceParentLink}.', + ), + [WORK_ITEM_ISSUE_TYPE_TASK]: s__( + 'ContributionEvent|Reopened task %{targetLink} in %{resourceParentLink}.', + ), + [WORK_ITEM_ISSUE_TYPE_INCIDENT]: s__( + 'ContributionEvent|Reopened incident %{targetLink} in %{resourceParentLink}.', + ), + [TYPE_FALLBACK]: s__('ContributionEvent|Reopened resource.'), }; export const EVENT_CLOSED_ICONS = { 
[WORK_ITEM_ISSUE_TYPE_ISSUE]: 'issue-closed', [TARGET_TYPE_MERGE_REQUEST]: 'merge-request-close', - fallback: 'status_closed', + [TYPE_FALLBACK]: 'status_closed', +}; + +export const EVENT_REOPENED_ICONS = { + [TARGET_TYPE_MERGE_REQUEST]: 'merge-request-open', + [TYPE_FALLBACK]: 'status_open', }; diff --git a/app/assets/javascripts/contribution_events/utils.js b/app/assets/javascripts/contribution_events/utils.js new file mode 100644 index 00000000000..0760b5187c6 --- /dev/null +++ b/app/assets/javascripts/contribution_events/utils.js @@ -0,0 +1,9 @@ +import { TYPE_FALLBACK } from './constants'; + +export const getValueByEventTarget = (map, event) => { + const { + target: { type: targetType, issue_type: issueType }, + } = event; + + return map[issueType || targetType] || map[TYPE_FALLBACK]; +}; diff --git a/app/assets/javascripts/environments/components/edit_environment.vue b/app/assets/javascripts/environments/components/edit_environment.vue index a2405d23924..a8e8e9a59e3 100644 --- a/app/assets/javascripts/environments/components/edit_environment.vue +++ b/app/assets/javascripts/environments/components/edit_environment.vue @@ -2,9 +2,7 @@ import { GlLoadingIcon } from '@gitlab/ui'; import { createAlert } from '~/alert'; import { visitUrl } from '~/lib/utils/url_utility'; -import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin'; import getEnvironment from '../graphql/queries/environment.query.graphql'; -import getEnvironmentWithNamespace from '../graphql/queries/environment_with_namespace.graphql'; import updateEnvironment from '../graphql/mutations/update_environment.mutation.graphql'; import EnvironmentForm from './environment_form.vue'; @@ -13,14 +11,11 @@ export default { GlLoadingIcon, EnvironmentForm, }, - mixins: [glFeatureFlagsMixin()], inject: ['projectEnvironmentsPath', 'projectPath', 'environmentName'], apollo: { environment: { query() { - return this.glFeatures?.kubernetesNamespaceForEnvironment - ? 
getEnvironmentWithNamespace - : getEnvironment; + return getEnvironment; }, variables() { return { diff --git a/app/assets/javascripts/environments/components/environment_form.vue b/app/assets/javascripts/environments/components/environment_form.vue index 1bff013b9c2..745a8a1d3ed 100644 --- a/app/assets/javascripts/environments/components/environment_form.vue +++ b/app/assets/javascripts/environments/components/environment_form.vue @@ -17,7 +17,6 @@ import { ENVIRONMENT_EDIT_HELP_TEXT, } from 'ee_else_ce/environments/constants'; import csrf from '~/lib/utils/csrf'; -import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin'; import { getIdFromGraphQLId } from '~/graphql_shared/utils'; import getNamespacesQuery from '../graphql/queries/k8s_namespaces.query.graphql'; import getUserAuthorizedAgents from '../graphql/queries/user_authorized_agents.query.graphql'; @@ -33,7 +32,6 @@ export default { GlSprintf, GlAlert, }, - mixins: [glFeatureFlagsMixin()], inject: { protectedEnvironmentSettingsPath: { default: '' }, projectPath: { default: '' }, @@ -173,11 +171,8 @@ export default { item.text.toLowerCase().includes(lowerCasedSearchTerm), ); }, - isKasKubernetesNamespaceAvailable() { - return this.glFeatures?.kubernetesNamespaceForEnvironment; - }, showNamespaceSelector() { - return Boolean(this.isKasKubernetesNamespaceAvailable && this.selectedAgentId); + return Boolean(this.selectedAgentId); }, namespaceDropdownToggleText() { return this.selectedNamespace || this.$options.i18n.namespaceHelpText; diff --git a/app/assets/javascripts/environments/components/new_environment_item.vue b/app/assets/javascripts/environments/components/new_environment_item.vue index 555d525c3b6..48a3281c16f 100644 --- a/app/assets/javascripts/environments/components/new_environment_item.vue +++ b/app/assets/javascripts/environments/components/new_environment_item.vue @@ -11,10 +11,8 @@ import { import { __, s__ } from '~/locale'; import { truncate } from 
'~/lib/utils/text_utility'; import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; -import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin'; import isLastDeployment from '../graphql/queries/is_last_deployment.query.graphql'; import getEnvironmentClusterAgent from '../graphql/queries/environment_cluster_agent.query.graphql'; -import getEnvironmentClusterAgentWithNamespace from '../graphql/queries/environment_cluster_agent_with_namespace.query.graphql'; import ExternalUrl from './environment_external_url.vue'; import Actions from './environment_actions.vue'; import StopComponent from './environment_stop.vue'; @@ -52,7 +50,6 @@ export default { directives: { GlTooltip, }, - mixins: [glFeatureFlagsMixin()], inject: ['helpPagePath', 'projectPath'], props: { environment: { @@ -165,9 +162,6 @@ export default { rolloutStatus() { return this.environment?.rolloutStatus; }, - isKubernetesNamespaceAvailable() { - return this.glFeatures?.kubernetesNamespaceForEnvironment; - }, }, methods: { toggleEnvironmentCollapse() { @@ -185,9 +179,7 @@ export default { return { environmentName: this.environment.name, projectFullPath: this.projectPath }; }, query() { - return this.isKubernetesNamespaceAvailable - ? 
getEnvironmentClusterAgentWithNamespace - : getEnvironmentClusterAgent; + return getEnvironmentClusterAgent; }, update(data) { this.clusterAgent = data?.project?.environment?.clusterAgent; diff --git a/app/assets/javascripts/environments/graphql/queries/environment.query.graphql b/app/assets/javascripts/environments/graphql/queries/environment.query.graphql index 20402e8d32e..53dfe5303f3 100644 --- a/app/assets/javascripts/environments/graphql/queries/environment.query.graphql +++ b/app/assets/javascripts/environments/graphql/queries/environment.query.graphql @@ -5,6 +5,7 @@ query getEnvironment($projectFullPath: ID!, $environmentName: String) { id name externalUrl + kubernetesNamespace clusterAgent { id name diff --git a/app/assets/javascripts/environments/graphql/queries/environment_cluster_agent.query.graphql b/app/assets/javascripts/environments/graphql/queries/environment_cluster_agent.query.graphql index 760f1fba897..19374ae7a81 100644 --- a/app/assets/javascripts/environments/graphql/queries/environment_cluster_agent.query.graphql +++ b/app/assets/javascripts/environments/graphql/queries/environment_cluster_agent.query.graphql @@ -3,6 +3,7 @@ query getEnvironmentClusterAgent($projectFullPath: ID!, $environmentName: String id environment(name: $environmentName) { id + kubernetesNamespace clusterAgent { id name diff --git a/app/assets/javascripts/environments/graphql/queries/environment_cluster_agent_with_namespace.query.graphql b/app/assets/javascripts/environments/graphql/queries/environment_cluster_agent_with_namespace.query.graphql deleted file mode 100644 index 5e72c2dac20..00000000000 --- a/app/assets/javascripts/environments/graphql/queries/environment_cluster_agent_with_namespace.query.graphql +++ /dev/null @@ -1,20 +0,0 @@ -query getEnvironmentClusterAgentWithNamespace($projectFullPath: ID!, $environmentName: String) { - project(fullPath: $projectFullPath) { - id - environment(name: $environmentName) { - id - kubernetesNamespace - clusterAgent { - id 
- name - webPath - tokens { - nodes { - id - lastUsedAt - } - } - } - } - } -} diff --git a/app/assets/javascripts/environments/graphql/queries/environment_with_namespace.graphql b/app/assets/javascripts/environments/graphql/queries/environment_with_namespace.graphql deleted file mode 100644 index 42796f982b6..00000000000 --- a/app/assets/javascripts/environments/graphql/queries/environment_with_namespace.graphql +++ /dev/null @@ -1,15 +0,0 @@ -query getEnvironmentWithNamespace($projectFullPath: ID!, $environmentName: String) { - project(fullPath: $projectFullPath) { - id - environment(name: $environmentName) { - id - name - externalUrl - kubernetesNamespace - clusterAgent { - id - name - } - } - } -} diff --git a/app/assets/javascripts/repository/components/blob_content_viewer.vue b/app/assets/javascripts/repository/components/blob_content_viewer.vue index 310095b0dd4..6f9f0a81dfd 100644 --- a/app/assets/javascripts/repository/components/blob_content_viewer.vue +++ b/app/assets/javascripts/repository/components/blob_content_viewer.vue @@ -454,7 +454,7 @@ export default { diff --git a/app/assets/javascripts/vue_merge_request_widget/index.js b/app/assets/javascripts/vue_merge_request_widget/index.js index a2f088a7a58..e8b97098a2b 100644 --- a/app/assets/javascripts/vue_merge_request_widget/index.js +++ b/app/assets/javascripts/vue_merge_request_widget/index.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import VueApollo from 'vue-apollo'; -import MrWidgetOptions from 'ee_else_ce/vue_merge_request_widget/mr_widget_options.vue'; +import MrWidgetOptions from 'any_else_ce/vue_merge_request_widget/mr_widget_options.vue'; import createDefaultClient from '~/lib/graphql'; import { parseBoolean } from '~/lib/utils/common_utils'; import Translate from '../vue_shared/translate'; diff --git a/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue b/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue index 
8e4c438719e..0fb6e577f32 100644 --- a/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue +++ b/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue @@ -40,6 +40,10 @@ export default { this.track(EVENT_ACTION, { label: EVENT_LABEL_VIEWER, property: this.blob.language }); addBlobLinksTracking(); }, + mounted() { + const { hash } = this.$route; + this.lineHighlighter.highlightHash(hash); + }, userColorScheme: window.gon.user_color_scheme, }; diff --git a/app/controllers/projects/environments_controller.rb b/app/controllers/projects/environments_controller.rb index 55a60e21784..e3c2a9f72f5 100644 --- a/app/controllers/projects/environments_controller.rb +++ b/app/controllers/projects/environments_controller.rb @@ -12,10 +12,6 @@ class Projects::EnvironmentsController < Projects::ApplicationController push_frontend_feature_flag(:environment_details_vue, @project) end - before_action only: [:index, :edit, :new] do - push_frontend_feature_flag(:kubernetes_namespace_for_environment) - end - before_action :authorize_read_environment! before_action :authorize_create_environment!, only: [:new, :create] before_action :authorize_stop_environment!, only: [:stop] diff --git a/app/finders/issuable_finder/params.rb b/app/finders/issuable_finder/params.rb index e59c2224594..bc136848dd5 100644 --- a/app/finders/issuable_finder/params.rb +++ b/app/finders/issuable_finder/params.rb @@ -133,7 +133,7 @@ class IssuableFinder def projects strong_memoize(:projects) do - next [project] if project? + next Array.wrap(project) if project? projects = if current_user && params[:authorized_only].presence && !current_user_related? 
diff --git a/app/models/ci/job_annotation.rb b/app/models/ci/job_annotation.rb index a8bef02cc42..a6ce4196cc1 100644 --- a/app/models/ci/job_annotation.rb +++ b/app/models/ci/job_annotation.rb @@ -3,6 +3,7 @@ module Ci class JobAnnotation < Ci::ApplicationRecord include Ci::Partitionable + include BulkInsertSafe self.table_name = :p_ci_job_annotations self.primary_key = :id @@ -13,7 +14,6 @@ module Ci validates :data, json_schema: { filename: 'ci_job_annotation_data' } validates :name, presence: true, - length: { maximum: 255 }, - uniqueness: { scope: [:job_id, :partition_id] } + length: { maximum: 255 } end end diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb index 11d70e088e9..3f9b1986b70 100644 --- a/app/models/ci/job_artifact.rb +++ b/app/models/ci/job_artifact.rb @@ -60,7 +60,8 @@ module Ci requirements_v2: 'requirements_v2.json', coverage_fuzzing: 'gl-coverage-fuzzing.json', api_fuzzing: 'gl-api-fuzzing-report.json', - cyclonedx: 'gl-sbom.cdx.json' + cyclonedx: 'gl-sbom.cdx.json', + annotations: 'gl-annotations.json' }.freeze INTERNAL_TYPES = { @@ -79,6 +80,7 @@ module Ci cluster_applications: :gzip, # DEPRECATED: https://gitlab.com/gitlab-org/gitlab/-/issues/361094 lsif: :zip, cyclonedx: :gzip, + annotations: :gzip, # Security reports and license scanning reports are raw artifacts # because they used to be fetched by the frontend, but this is not the case anymore. @@ -221,7 +223,8 @@ module Ci api_fuzzing: 26, ## EE-specific cluster_image_scanning: 27, ## EE-specific cyclonedx: 28, ## EE-specific - requirements_v2: 29 ## EE-specific + requirements_v2: 29, ## EE-specific + annotations: 30 } # `file_location` indicates where actual files are stored. 
diff --git a/app/models/pool_repository.rb b/app/models/pool_repository.rb index f22a63ee980..bc3898fafe7 100644 --- a/app/models/pool_repository.rb +++ b/app/models/pool_repository.rb @@ -12,7 +12,13 @@ class PoolRepository < ApplicationRecord has_many :member_projects, class_name: 'Project' - after_create :correct_disk_path + after_create :set_disk_path + + scope :by_source_project, ->(project) { where(source_project: project) } + scope :by_source_project_and_shard_name, ->(project, shard_name) do + by_source_project(project) + .for_repository_storage(shard_name) + end state_machine :state, initial: :none do state :scheduled @@ -107,8 +113,8 @@ class PoolRepository < ApplicationRecord private - def correct_disk_path - update!(disk_path: storage.disk_path) + def set_disk_path + update!(disk_path: storage.disk_path) if disk_path.blank? end def storage diff --git a/app/models/project.rb b/app/models/project.rb index 08539a851b5..5c0a30503f7 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -1953,6 +1953,8 @@ class Project < ApplicationRecord def track_project_repository repository = project_repository || build_project_repository repository.update!(shard_name: repository_storage, disk_path: disk_path) + + cleanup if replicate_object_pool_on_move_ff_enabled? end def create_repository(force: false, default_branch: nil) @@ -2827,8 +2829,26 @@ class Project < ApplicationRecord update_column(:pool_repository_id, nil) end + # After repository is moved from shard to shard, disconnect it from the previous object pool and connect to the new pool + def swap_pool_repository! + return unless replicate_object_pool_on_move_ff_enabled? + return unless repository_exists? + + old_pool_repository = pool_repository + return if old_pool_repository.blank? + return if pool_repository_shard_matches_repository?(old_pool_repository) + + new_pool_repository = PoolRepository.by_source_project_and_shard_name(old_pool_repository.source_project, repository_storage).take! 
+ update!(pool_repository: new_pool_repository) + + old_pool_repository.unlink_repository(repository, disconnect: !pending_delete?) + end + def link_pool_repository - pool_repository&.link_repository(repository) + return unless pool_repository + return if (pool_repository.shard_name != repository.shard) && replicate_object_pool_on_move_ff_enabled? + + pool_repository.link_repository(repository) end def has_pool_repository? @@ -3507,6 +3527,16 @@ class Project < ApplicationRecord def runners_token_prefix RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX end + + def replicate_object_pool_on_move_ff_enabled? + Feature.enabled?(:replicate_object_pool_on_move, self) + end + + def pool_repository_shard_matches_repository?(pool) + pool_repository_shard = pool.shard.name + + pool_repository_shard == repository_storage + end end Project.prepend_mod_with('Project') diff --git a/app/services/ci/job_artifacts/create_service.rb b/app/services/ci/job_artifacts/create_service.rb index 3ac0e83232f..c09b0cf81f1 100644 --- a/app/services/ci/job_artifacts/create_service.rb +++ b/app/services/ci/job_artifacts/create_service.rb @@ -138,6 +138,7 @@ module Ci def parse_artifact(artifact) case artifact.file_type when 'dotenv' then parse_dotenv_artifact(artifact) + when 'annotations' then parse_annotations_artifact(artifact) else success end end @@ -188,6 +189,10 @@ module Ci def parse_dotenv_artifact(artifact) Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact) end + + def parse_annotations_artifact(artifact) + Ci::ParseAnnotationsArtifactService.new(project, current_user).execute(artifact) + end end end end diff --git a/app/services/ci/parse_annotations_artifact_service.rb b/app/services/ci/parse_annotations_artifact_service.rb new file mode 100644 index 00000000000..cbda7e827d4 --- /dev/null +++ b/app/services/ci/parse_annotations_artifact_service.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module Ci + class ParseAnnotationsArtifactService < ::BaseService + 
include ::Gitlab::Utils::StrongMemoize + include ::Gitlab::EncodingHelper + + SizeLimitError = Class.new(StandardError) + ParserError = Class.new(StandardError) + + def execute(artifact) + return error('Artifact is not annotations file type', :bad_request) unless artifact&.annotations? + + return error("Annotations Artifact Too Big. Maximum Allowable Size: #{annotations_size_limit}", :bad_request) if + artifact.file.size > annotations_size_limit + + annotations = parse!(artifact) + Ci::JobAnnotation.bulk_upsert!(annotations, unique_by: %i[partition_id job_id name]) + + success + rescue SizeLimitError, ParserError, Gitlab::Json.parser_error, ActiveRecord::RecordInvalid => error + error(error.message, :bad_request) + end + + private + + def parse!(artifact) + annotations = [] + + artifact.each_blob do |blob| + # Windows powershell may output UTF-16LE files, so convert the whole file + # to UTF-8 before proceeding. + blob = strip_bom(encode_utf8_with_replacement_character(blob)) + + blob_json = Gitlab::Json.parse(blob) + raise ParserError, 'Annotations files must be a JSON object' unless blob_json.is_a?(Hash) + + blob_json.each do |key, value| + annotations.push(Ci::JobAnnotation.new(job: artifact.job, name: key, data: value)) + + if annotations.size > annotations_num_limit + raise SizeLimitError, + "Annotations files cannot have more than #{annotations_num_limit} annotation lists" + end + end + end + + annotations + end + + def annotations_num_limit + project.actual_limits.ci_job_annotations_num + end + strong_memoize_attr :annotations_num_limit + + def annotations_size_limit + project.actual_limits.ci_job_annotations_size + end + strong_memoize_attr :annotations_size_limit + end +end diff --git a/app/services/concerns/update_repository_storage_methods.rb b/app/services/concerns/update_repository_storage_methods.rb index bb43cab79bb..dca38abf7af 100644 --- a/app/services/concerns/update_repository_storage_methods.rb +++ 
b/app/services/concerns/update_repository_storage_methods.rb @@ -24,7 +24,13 @@ module UpdateRepositoryStorageMethods return response if response - mirror_repositories unless same_filesystem? + unless same_filesystem? + mirror_repositories + + repository_storage_move.transaction do + mirror_object_pool(destination_storage_name) + end + end repository_storage_move.transaction do repository_storage_move.finish_replication! @@ -53,6 +59,11 @@ module UpdateRepositoryStorageMethods raise NotImplementedError end + def mirror_object_pool(_destination_shard) + # no-op, redefined for Projects::UpdateRepositoryStorageService + nil + end + def mirror_repository(type:) unless wait_for_pushes(type) raise Error, s_('UpdateRepositoryStorage|Timeout waiting for %{type} repository pushes') % { type: type.name } diff --git a/app/services/projects/update_repository_storage_service.rb b/app/services/projects/update_repository_storage_service.rb index cadf3012131..f5f6bb85995 100644 --- a/app/services/projects/update_repository_storage_service.rb +++ b/app/services/projects/update_repository_storage_service.rb @@ -9,12 +9,20 @@ module Projects private def track_repository(_destination_storage_name) - project.leave_pool_repository + # Connect project to pool repository from the new shard + project.swap_pool_repository! + + # Connect project to the repository from the new shard project.track_project_repository + + # Link repository from the new shard to pool repository from the new shard + project.link_pool_repository if replicate_object_pool_on_move_ff_enabled? end def mirror_repositories - mirror_repository(type: Gitlab::GlRepository::PROJECT) if project.repository_exists? + if project.repository_exists? + mirror_repository(type: Gitlab::GlRepository::PROJECT) + end if project.wiki.repository_exists? 
mirror_repository(type: Gitlab::GlRepository::WIKI) @@ -25,6 +33,30 @@ module Projects end end + def mirror_object_pool(destination_storage_name) + return unless replicate_object_pool_on_move_ff_enabled? + return unless project.repository_exists? + + pool_repository = project.pool_repository + return unless pool_repository + + # If pool repository already exists, then we will link the moved project repository to it + return if pool_repository_exists_for?(shard_name: destination_storage_name, pool_repository: pool_repository) + + target_pool_repository = create_pool_repository_for!( + shard_name: destination_storage_name, + pool_repository: pool_repository + ) + + checksum, new_checksum = replicate_object_pool_repository(from: pool_repository, to: target_pool_repository) + + if checksum != new_checksum + raise Error, + format(s_('UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}'), + type: 'object_pool', old: checksum, new: new_checksum) + end + end + def remove_old_paths super @@ -46,5 +78,39 @@ module Projects ).remove end end + + def pool_repository_exists_for?(shard_name:, pool_repository:) + PoolRepository.by_source_project_and_shard_name( + pool_repository.source_project, + shard_name + ).exists? + end + + def create_pool_repository_for!(shard_name:, pool_repository:) + # Set state `ready` because we manually replicate object pool + PoolRepository.create!( + shard: Shard.by_name(shard_name), + source_project: pool_repository.source_project, + disk_path: pool_repository.disk_path, + state: 'ready' + ) + end + + def replicate_object_pool_repository(from:, to:) + old_object_pool = from.object_pool + new_object_pool = to.object_pool + + checksum = old_object_pool.repository.checksum + + new_object_pool.repository.replicate(old_object_pool.repository) + + new_checksum = new_object_pool.repository.checksum + + [checksum, new_checksum] + end + + def replicate_object_pool_on_move_ff_enabled? 
+ Feature.enabled?(:replicate_object_pool_on_move, project) + end end end diff --git a/config/events/20230807084242_InternalEventTracking_g_edit_by_snippet_ide.yml b/config/events/20230807084242_InternalEventTracking_g_edit_by_snippet_ide.yml new file mode 100644 index 00000000000..7eb99c46a97 --- /dev/null +++ b/config/events/20230807084242_InternalEventTracking_g_edit_by_snippet_ide.yml @@ -0,0 +1,25 @@ +--- +description: A snippet has been edited from the Web IDE +category: InternalEventTracking +action: g_edit_by_snippet_ide +label_description: +property_description: +value_description: +extra_properties: +identifiers: +- project +- user +- namespace +product_section: dev +product_stage: create +product_group: source_code +milestone: "16.3" +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128592 +distributions: +- ce +- ee +tiers: +- free +- premium +- ultimate + diff --git a/config/events/20230807084603_InternalEventTracking_g_edit_by_web_ide.yml b/config/events/20230807084603_InternalEventTracking_g_edit_by_web_ide.yml new file mode 100644 index 00000000000..d15f2ea7cfd --- /dev/null +++ b/config/events/20230807084603_InternalEventTracking_g_edit_by_web_ide.yml @@ -0,0 +1,25 @@ +--- +description: A file has been edited from the Web IDE +category: InternalEventTracking +action: g_edit_by_web_ide +label_description: +property_description: +value_description: +extra_properties: +identifiers: +- project +- user +- namespace +product_section: dev +product_stage: create +product_group: source_code +milestone: "16.3" +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128592 +distributions: +- ce +- ee +tiers: +- free +- premium +- ultimate + diff --git a/config/events/20230808041008_InternalEventTracking_g_edit_by_sfe.yml b/config/events/20230808041008_InternalEventTracking_g_edit_by_sfe.yml new file mode 100644 index 00000000000..ec79bc31132 --- /dev/null +++ 
b/config/events/20230808041008_InternalEventTracking_g_edit_by_sfe.yml @@ -0,0 +1,25 @@ +--- +description: A file has been edited from the single file editor +category: InternalEventTracking +action: g_edit_by_sfe +label_description: +property_description: +value_description: +extra_properties: +identifiers: +- project +- user +- namespace +product_section: dev +product_stage: create +product_group: source_code +milestone: "16.3" +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128592 +distributions: +- ce +- ee +tiers: +- free +- premium +- ultimate + diff --git a/config/feature_flags/development/replicate_object_pool_on_move.yml b/config/feature_flags/development/replicate_object_pool_on_move.yml new file mode 100644 index 00000000000..8f34969a02d --- /dev/null +++ b/config/feature_flags/development/replicate_object_pool_on_move.yml @@ -0,0 +1,8 @@ +--- +name: replicate_object_pool_on_move +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127143 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/420720 +milestone: '16.3' +type: development +group: group::source code +default_enabled: false diff --git a/config/feature_flags/development/kubernetes_namespace_for_environment.yml b/config/feature_flags/ops/emit_sidekiq_histogram_metrics.yml similarity index 54% rename from config/feature_flags/development/kubernetes_namespace_for_environment.yml rename to config/feature_flags/ops/emit_sidekiq_histogram_metrics.yml index 89c903ca098..3433dc263cc 100644 --- a/config/feature_flags/development/kubernetes_namespace_for_environment.yml +++ b/config/feature_flags/ops/emit_sidekiq_histogram_metrics.yml @@ -1,8 +1,8 @@ --- -name: kubernetes_namespace_for_environment -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/125191 -rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/417129 -milestone: '16.2' -type: development -group: group::environments +name: 
emit_sidekiq_histogram_metrics +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128706 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/421499 +milestone: '16.3' +type: ops +group: group::scalability default_enabled: true diff --git a/config/metrics/counts_28d/20210216180330_g_edit_by_web_ide_monthly.yml b/config/metrics/counts_28d/20210216180330_g_edit_by_web_ide_monthly.yml index f45d7fcc784..c4908d7739c 100644 --- a/config/metrics/counts_28d/20210216180330_g_edit_by_web_ide_monthly.yml +++ b/config/metrics/counts_28d/20210216180330_g_edit_by_web_ide_monthly.yml @@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric options: events: - g_edit_by_web_ide +events: + - name: g_edit_by_web_ide + unique: user.id distribution: - ce - ee diff --git a/config/metrics/counts_28d/20210216180334_g_edit_by_sfe_monthly.yml b/config/metrics/counts_28d/20210216180334_g_edit_by_sfe_monthly.yml index 9c0e63e0d8f..b9aca413d7c 100644 --- a/config/metrics/counts_28d/20210216180334_g_edit_by_sfe_monthly.yml +++ b/config/metrics/counts_28d/20210216180334_g_edit_by_sfe_monthly.yml @@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric options: events: - g_edit_by_sfe +events: + - name: g_edit_by_sfe + unique: user.id distribution: - ce - ee diff --git a/config/metrics/counts_28d/20210216180338_g_edit_by_snippet_ide_monthly.yml b/config/metrics/counts_28d/20210216180338_g_edit_by_snippet_ide_monthly.yml index 8074329bd9d..9c439d6a6a3 100644 --- a/config/metrics/counts_28d/20210216180338_g_edit_by_snippet_ide_monthly.yml +++ b/config/metrics/counts_28d/20210216180338_g_edit_by_snippet_ide_monthly.yml @@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric options: events: - g_edit_by_snippet_ide +events: + - name: g_edit_by_snippet_ide + unique: user.id distribution: - ce - ee diff --git a/config/metrics/counts_7d/20210216180328_g_edit_by_web_ide_weekly.yml b/config/metrics/counts_7d/20210216180328_g_edit_by_web_ide_weekly.yml index 
c17e5e77411..1be0ba4f96d 100644 --- a/config/metrics/counts_7d/20210216180328_g_edit_by_web_ide_weekly.yml +++ b/config/metrics/counts_7d/20210216180328_g_edit_by_web_ide_weekly.yml @@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric options: events: - g_edit_by_web_ide +events: + - name: g_edit_by_web_ide + unique: user.id distribution: - ce - ee diff --git a/config/metrics/counts_7d/20210216180332_g_edit_by_sfe_weekly.yml b/config/metrics/counts_7d/20210216180332_g_edit_by_sfe_weekly.yml index 4ce88623768..a174cfc4588 100644 --- a/config/metrics/counts_7d/20210216180332_g_edit_by_sfe_weekly.yml +++ b/config/metrics/counts_7d/20210216180332_g_edit_by_sfe_weekly.yml @@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric options: events: - g_edit_by_sfe +events: + - name: g_edit_by_sfe + unique: user.id distribution: - ce - ee diff --git a/config/metrics/counts_7d/20210216180336_g_edit_by_snippet_ide_weekly.yml b/config/metrics/counts_7d/20210216180336_g_edit_by_snippet_ide_weekly.yml index 435a5c381a0..afc3c9db8a0 100644 --- a/config/metrics/counts_7d/20210216180336_g_edit_by_snippet_ide_weekly.yml +++ b/config/metrics/counts_7d/20210216180336_g_edit_by_snippet_ide_weekly.yml @@ -13,6 +13,9 @@ instrumentation_class: RedisHLLMetric options: events: - g_edit_by_snippet_ide +events: + - name: g_edit_by_snippet_ide + unique: user.id distribution: - ce - ee diff --git a/db/migrate/20230728171609_add_ci_job_annotations_plan_limits.rb b/db/migrate/20230728171609_add_ci_job_annotations_plan_limits.rb new file mode 100644 index 00000000000..2e39280b6fe --- /dev/null +++ b/db/migrate/20230728171609_add_ci_job_annotations_plan_limits.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +class AddCiJobAnnotationsPlanLimits < Gitlab::Database::Migration[2.1] + def change + add_column :plan_limits, :ci_max_artifact_size_annotations, :integer, null: false, default: 0 + add_column :plan_limits, :ci_job_annotations_size, :integer, null: false, default: 81920 + add_column 
:plan_limits, :ci_job_annotations_num, :integer, null: false, default: 20 + end +end diff --git a/db/schema_migrations/20230728171609 b/db/schema_migrations/20230728171609 new file mode 100644 index 00000000000..7192de166d2 --- /dev/null +++ b/db/schema_migrations/20230728171609 @@ -0,0 +1 @@ +2e2c9416a8c60fc7273f732b2890ee84dcd864ff68a269a2ca1603e6bfaf4c31 \ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index 016e5a4a106..3200a7ca4dc 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -20444,7 +20444,10 @@ CREATE TABLE plan_limits ( google_cloud_logging_configurations integer DEFAULT 5 NOT NULL, ml_model_max_file_size bigint DEFAULT '10737418240'::bigint NOT NULL, limits_history jsonb DEFAULT '{}'::jsonb NOT NULL, - updated_at timestamp with time zone DEFAULT now() NOT NULL + updated_at timestamp with time zone DEFAULT now() NOT NULL, + ci_max_artifact_size_annotations integer DEFAULT 0 NOT NULL, + ci_job_annotations_size integer DEFAULT 81920 NOT NULL, + ci_job_annotations_num integer DEFAULT 20 NOT NULL ); CREATE SEQUENCE plan_limits_id_seq diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md index b191c9d43cf..ceec3bc5658 100644 --- a/doc/api/graphql/reference/index.md +++ b/doc/api/graphql/reference/index.md @@ -26829,6 +26829,7 @@ Iteration ID wildcard values. | Value | Description | | ----- | ----------- | | `ACCESSIBILITY` | ACCESSIBILITY job artifact file type. | +| `ANNOTATIONS` | ANNOTATIONS job artifact file type. | | `API_FUZZING` | API FUZZING job artifact file type. | | `ARCHIVE` | ARCHIVE job artifact file type. | | `BROWSER_PERFORMANCE` | BROWSER PERFORMANCE job artifact file type. 
| diff --git a/doc/ci/environments/kubernetes_dashboard.md b/doc/ci/environments/kubernetes_dashboard.md index f5ad78483ce..547892bf0cd 100644 --- a/doc/ci/environments/kubernetes_dashboard.md +++ b/doc/ci/environments/kubernetes_dashboard.md @@ -21,7 +21,7 @@ For Flux users, the synchronization status of a given environment is not display ## Configure a dashboard > - Filtering resources by namespace [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/403618) in GitLab 16.2 [with a flag](../../administration/feature_flags.md) named `kubernetes_namespace_for_environment`. Disabled by default. -> - Feature flag `kubernetes_namespace_for_environment` [enabled by default](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127043) in GitLab 16.3. +> - Filtering resources by namespace [enabled by default](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127043) in GitLab 16.3. Feature flag `kubernetes_namespace_for_environment` removed. Configure a dashboard to use it for a given environment. You can configure dashboard for an environment that already exists, or diff --git a/doc/operations/error_tracking.md b/doc/operations/error_tracking.md index ec502656bd6..926b86a5378 100644 --- a/doc/operations/error_tracking.md +++ b/doc/operations/error_tracking.md @@ -82,7 +82,7 @@ You can also review the stack trace. ### Supported language SDKs & Sentry types -In the following table, you can see a list of all event types available through Sentry SDK, and whether they are supported by GitLab Error Tracking. +The following table lists all event types available through Sentry SDK, and whether they are supported by GitLab Error Tracking. 
| Language | Tested SDK client and version | Endpoint | Supported item types | | -------- | ------------------------------- | ---------- | --------------------------------- | @@ -201,9 +201,7 @@ to your runner's `config.toml` configuration file, as referenced in If you're asked for the project type while setting up Sentry, select **Go**. -If you see the following error in your GitLab Runner logs, then you should -specify the deprecated -DSN in **Sentry.io > Project Settings > Client Keys (DSN) > Show deprecated DSN**. +To rectify the following error, specify the deprecated DSN in **Sentry.io > Project Settings > Client Keys (DSN) > Show deprecated DSN**. ```plaintext ERROR: Sentry failure builds=0 error=raven: dsn missing private key diff --git a/doc/tutorials/configure_gitlab_runner_to_use_gke/index.md b/doc/tutorials/configure_gitlab_runner_to_use_gke/index.md index 05340994edf..1a6a26dad2f 100644 --- a/doc/tutorials/configure_gitlab_runner_to_use_gke/index.md +++ b/doc/tutorials/configure_gitlab_runner_to_use_gke/index.md @@ -64,7 +64,7 @@ and, for autopilot clusters, to add configurations that specify which jobs to ru 1. Verify that you are connected to the cluster: ```shell - kubectl config view current-context + kubectl config current-context ``` ## Install and configure the Kubernetes Operator @@ -74,7 +74,7 @@ Now that you have a cluster, you're ready to install and configure the Kubernete 1. Install the prerequisites: ```shell - kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.7.1/cert-manager.yaml + kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.7.1/cert-manager.yaml ``` 1. 
Install the Operator Lifecycle Manager (OLM), a tool that manages the Kubernetes Operators that diff --git a/doc/user/group/manage.md b/doc/user/group/manage.md index 308f5d20502..48685325dd0 100644 --- a/doc/user/group/manage.md +++ b/doc/user/group/manage.md @@ -233,11 +233,10 @@ For members with `Minimal Access` in the selected group, their `Max Role` and `S ## User cap for groups -> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/330027) in GitLab 14.7. +> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/330027) in GitLab 14.7 [with a flag](../../administration/feature_flags.md) named `saas_user_caps`. Disabled by default. +> - [Enabled on GitLab.com](https://gitlab.com/groups/gitlab-org/-/epics/9263) in GitLab 16.3. -FLAG: -On self-managed GitLab, this feature is not available. On GitLab.com, this feature is available for some groups. -This feature is not ready for production use. +For more information about user caps for GitLab self-managed, see [User cap](../../administration/settings/sign_up_restrictions.md#user-cap). When the number of billable members reaches the user cap, new users can't be added to the group without being approved by the group owner. @@ -301,6 +300,16 @@ To approve members that are pending because they've exceeded the user cap: 1. On the **Seats** tab, under the alert, select **View pending approvals**. 1. For each member you want to approve, select **Approve**. +### Known issues + +The user cap cannot be enabled if a group, subgroup, or project is shared externally. If a group, subgroup, +or project is shared externally, it is shared outside of the namespace hierarchy, regardless of its level +in the hierarchy. + +To ensure that the user cap applies when groups, subgroups, or projects are shared externally, restrict group sharing only within the top-level namespace. This ensures that groups in the same top-level namespace can be invited, and prevents the addition of new users (seats) when the group is shared. 
+ +User cap doesn’t consider whether users are billable or not (e.g., Free Guest Users in Ultimate). In other words, if you set a cap of 500, user caps block new sign-ups after 500 users, regardless of whether those are all consuming paid seats or not. + ## Group file templates **(PREMIUM)** Use group file templates to share a set of templates for common file diff --git a/doc/user/group/saml_sso/group_sync.md b/doc/user/group/saml_sso/group_sync.md index 63d3dd4f95c..53f43e2af54 100644 --- a/doc/user/group/saml_sso/group_sync.md +++ b/doc/user/group/saml_sso/group_sync.md @@ -151,7 +151,7 @@ To integrate Microsoft Azure AD, you: 1. Select **Microsoft Graph > Application permissions**. 1. Select the checkboxes **GroupMember.Read.All** and **User.Read.All**. 1. Select **Add permissions** to save. -1. Select **Grant admin consent for **, then on the confirmation dialog select **Yes**. The **Status** column for both permissions should change to a green check with **Granted for **. +1. Select **Grant admin consent for ``**, then on the confirmation dialog select **Yes**. The **Status** column for both permissions should change to a green check with **Granted for ``**. diff --git a/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs.rb b/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs.rb index 4b69ebd5ef6..952def83658 100644 --- a/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs.rb +++ b/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs.rb @@ -14,11 +14,12 @@ module Gitlab def find(timeout: nil) if ignore_alternate_directories? blobs = repository.list_all_blobs(bytes_limit: 0, dynamic_timeout: timeout, - ignore_alternate_object_directories: true) + ignore_alternate_object_directories: true).to_a - blobs.select do |blob| + blobs.select! 
do |blob| ::Gitlab::Utils.bytes_to_megabytes(blob.size) > file_size_limit_megabytes end + filter_existing(blobs) else any_oversize_blobs.find(timeout: timeout) end @@ -28,6 +29,15 @@ module Gitlab attr_reader :project, :repository, :changes, :file_size_limit_megabytes + def filter_existing(blobs) + gitaly_repo = repository.gitaly_repository.dup.tap { |repo| repo.git_object_directory = "" } + + map_blob_id_to_existence = repository.gitaly_commit_client.object_existence_map(blobs.map(&:id), + gitaly_repo: gitaly_repo) + + blobs.reject { |blob| map_blob_id_to_existence[blob.id].present? } + end + def ignore_alternate_directories? git_env = ::Gitlab::Git::HookEnv.all(repository.gl_repository) diff --git a/lib/gitlab/ci/config/entry/reports.rb b/lib/gitlab/ci/config/entry/reports.rb index 6408f412e6f..3c180674f2a 100644 --- a/lib/gitlab/ci/config/entry/reports.rb +++ b/lib/gitlab/ci/config/entry/reports.rb @@ -17,7 +17,7 @@ module Gitlab dast performance browser_performance load_performance license_scanning metrics lsif dotenv terraform accessibility coverage_fuzzing api_fuzzing cluster_image_scanning - requirements requirements_v2 coverage_report cyclonedx].freeze + requirements requirements_v2 coverage_report cyclonedx annotations].freeze attributes ALLOWED_KEYS @@ -50,6 +50,7 @@ module Gitlab validates :requirements, array_of_strings_or_string: true validates :requirements_v2, array_of_strings_or_string: true validates :cyclonedx, array_of_strings_or_string: true + validates :annotations, array_of_strings_or_string: true end end diff --git a/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics.rb b/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics.rb deleted file mode 100644 index 3650ddf698a..00000000000 --- a/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics.rb +++ /dev/null @@ -1,54 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Metrics - module Dashboard - module Transformers - module Yml - 
module V1 - # Takes a JSON schema validated dashboard hash and - # maps it to PrometheusMetric model attributes - class PrometheusMetrics - def initialize(dashboard_hash, project: nil, dashboard_path: nil) - @dashboard_hash = dashboard_hash.with_indifferent_access - @project = project - @dashboard_path = dashboard_path - - @dashboard_hash.default_proc = -> (h, k) { raise Transformers::Errors::MissingAttribute, k.to_s } - end - - def execute - prometheus_metrics = [] - - dashboard_hash[:panel_groups].each do |panel_group| - panel_group[:panels].each do |panel| - panel[:metrics].each do |metric| - prometheus_metrics << { - project: project, - title: panel[:title], - y_label: panel[:y_label], - query: metric[:query_range] || metric[:query], - unit: metric[:unit], - legend: metric[:label], - identifier: metric[:id], - group: Enums::PrometheusMetric.groups[:custom], - common: false, - dashboard_path: dashboard_path - }.compact - end - end - end - - prometheus_metrics - end - - private - - attr_reader :dashboard_hash, :project, :dashboard_path - end - end - end - end - end - end -end diff --git a/lib/gitlab/metrics/dashboard/validator.rb b/lib/gitlab/metrics/dashboard/validator.rb deleted file mode 100644 index 57b4b5c068d..00000000000 --- a/lib/gitlab/metrics/dashboard/validator.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Metrics - module Dashboard - module Validator - DASHBOARD_SCHEMA_PATH = Rails.root.join(*%w[lib gitlab metrics dashboard validator schemas dashboard.json]).freeze - - class << self - def validate(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil) - errors(content, schema_path, dashboard_path: dashboard_path, project: project).empty? - end - - def validate!(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil) - errors = errors(content, schema_path, dashboard_path: dashboard_path, project: project) - errors.empty? 
|| raise(errors.first) - end - - private - - def errors(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil) - Validator::Client - .new(content, schema_path, dashboard_path: dashboard_path, project: project) - .execute - end - end - end - end - end -end diff --git a/lib/gitlab/metrics/dashboard/validator/client.rb b/lib/gitlab/metrics/dashboard/validator/client.rb deleted file mode 100644 index 29f1274a097..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/client.rb +++ /dev/null @@ -1,56 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Metrics - module Dashboard - module Validator - class Client - # @param content [Hash] Representing a raw, unprocessed - # dashboard object - # @param schema_path [String] Representing path to dashboard schema file - # @param dashboard_path[String] Representing path to dashboard content file - # @param project [Project] Project to validate dashboard against - def initialize(content, schema_path, dashboard_path: nil, project: nil) - @content = content - @schema_path = schema_path - @dashboard_path = dashboard_path - @project = project - end - - def execute - errors = validate_against_schema - errors += post_schema_validator.validate - - errors.compact - end - - private - - attr_reader :content, :schema_path, :project, :dashboard_path - - def custom_formats - @custom_formats ||= CustomFormats.new - end - - def post_schema_validator - PostSchemaValidator.new( - project: project, - metric_ids: custom_formats.metric_ids_cache, - dashboard_path: dashboard_path - ) - end - - def schemer - @schemer ||= ::JSONSchemer.schema(Pathname.new(schema_path), formats: custom_formats.format_handlers) - end - - def validate_against_schema - schemer.validate(content).map do |error| - ::Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError.new(error) - end - end - end - end - end - end -end diff --git a/lib/gitlab/metrics/dashboard/validator/custom_formats.rb 
b/lib/gitlab/metrics/dashboard/validator/custom_formats.rb deleted file mode 100644 index 485e80ad1b7..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/custom_formats.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Metrics - module Dashboard - module Validator - class CustomFormats - def format_handlers - # Key is custom JSON Schema format name. Value is a proc that takes data and schema and handles - # validations. - @format_handlers ||= { - "add_to_metric_id_cache" => ->(data, schema) { metric_ids_cache << data } - } - end - - def metric_ids_cache - @metric_ids_cache ||= [] - end - end - end - end - end -end diff --git a/lib/gitlab/metrics/dashboard/validator/errors.rb b/lib/gitlab/metrics/dashboard/validator/errors.rb deleted file mode 100644 index 0f6e687d291..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/errors.rb +++ /dev/null @@ -1,60 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Metrics - module Dashboard - module Validator - module Errors - InvalidDashboardError = Class.new(StandardError) - - class SchemaValidationError < InvalidDashboardError - def initialize(error = {}) - super(error_message(error)) - end - - private - - def error_message(error) - if error.is_a?(Hash) && error.present? - pretty(error) - else - "Dashboard failed schema validation" - end - end - - # based on https://github.com/davishmcclurg/json_schemer/blob/master/lib/json_schemer/errors.rb - # with addition ability to translate error messages - def pretty(error) - data, data_pointer, type, schema = error.values_at('data', 'data_pointer', 'type', 'schema') - location = data_pointer.empty? ? 
'root' : data_pointer - - case type - when 'required' - keys = error.fetch('details').fetch('missing_keys').join(', ') - _("%{location} is missing required keys: %{keys}") % { location: location, keys: keys } - when 'null', 'string', 'boolean', 'integer', 'number', 'array', 'object' - _("'%{data}' at %{location} is not of type: %{type}") % { data: data, location: location, type: type } - when 'pattern' - _("'%{data}' at %{location} does not match pattern: %{pattern}") % { data: data, location: location, pattern: schema.fetch('pattern') } - when 'format' - _("'%{data}' at %{location} does not match format: %{format}") % { data: data, location: location, format: schema.fetch('format') } - when 'const' - _("'%{data}' at %{location} is not: %{const}") % { data: data, location: location, const: schema.fetch('const').inspect } - when 'enum' - _("'%{data}' at %{location} is not one of: %{enum}") % { data: data, location: location, enum: schema.fetch('enum') } - else - _("'%{data}' at %{location} is invalid: error_type=%{type}") % { data: data, location: location, type: type } - end - end - end - - class DuplicateMetricIds < InvalidDashboardError - def initialize - super(_("metric_id must be unique across a project")) - end - end - end - end - end - end -end diff --git a/lib/gitlab/metrics/dashboard/validator/post_schema_validator.rb b/lib/gitlab/metrics/dashboard/validator/post_schema_validator.rb deleted file mode 100644 index 73bfc5a6294..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/post_schema_validator.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Metrics - module Dashboard - module Validator - class PostSchemaValidator - def initialize(metric_ids:, project: nil, dashboard_path: nil) - @metric_ids = metric_ids - @project = project - @dashboard_path = dashboard_path - end - - def validate - errors = [] - errors << uniq_metric_ids - errors.compact - end - - private - - attr_reader :project, :metric_ids, 
:dashboard_path - - def uniq_metric_ids - return Validator::Errors::DuplicateMetricIds.new if metric_ids.uniq! - - uniq_metric_ids_across_project if project.present? || dashboard_path.present? - end - - # rubocop: disable CodeReuse/ActiveRecord - def uniq_metric_ids_across_project - return ArgumentError.new(_('Both project and dashboard_path are required')) unless - dashboard_path.present? && project.present? - - # If PrometheusMetric identifier is not unique across project and dashboard_path, - # we need to error because we don't know if the user is trying to create a new metric - # or update an existing one. - identifier_on_other_dashboard = PrometheusMetric.where( - project: project, - identifier: metric_ids - ).where.not( - dashboard_path: dashboard_path - ).exists? - - Validator::Errors::DuplicateMetricIds.new if identifier_on_other_dashboard - end - # rubocop: enable CodeReuse/ActiveRecord - end - end - end - end -end diff --git a/lib/gitlab/metrics/dashboard/validator/schemas/axis.json b/lib/gitlab/metrics/dashboard/validator/schemas/axis.json deleted file mode 100644 index 54334022426..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/schemas/axis.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "type": "object", - "properties": { - "name": { "type": "string" }, - "format": { - "type": "string", - "default": "engineering" - }, - "precision": { - "type": "number", - "default": 2 - } - } -} diff --git a/lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json b/lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json deleted file mode 100644 index 313f03be7dc..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "type": "object", - "required": ["dashboard", "panel_groups"], - "properties": { - "dashboard": { "type": "string" }, - "panel_groups": { - "type": "array", - "items": { "$ref": "./panel_group.json" } - }, - "templating": { - "$ref": "./templating.json" - }, - "links": { - "type": 
"array", - "items": { "$ref": "./link.json" } - } - } -} diff --git a/lib/gitlab/metrics/dashboard/validator/schemas/link.json b/lib/gitlab/metrics/dashboard/validator/schemas/link.json deleted file mode 100644 index 4ea7b5dd324..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/schemas/link.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "object", - "required": ["url"], - "properties": { - "url": { "type": "string" }, - "title": { "type": "string" }, - "type": { - "type": "string", - "enum": ["grafana"] - } - } -} diff --git a/lib/gitlab/metrics/dashboard/validator/schemas/metric.json b/lib/gitlab/metrics/dashboard/validator/schemas/metric.json deleted file mode 100644 index 13831b77e3e..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/schemas/metric.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "type": "object", - "required": ["unit"], - "oneOf": [{ "required": ["query"] }, { "required": ["query_range"] }], - "properties": { - "id": { - "type": "string", - "format": "add_to_metric_id_cache" - }, - "unit": { "type": "string" }, - "label": { "type": "string" }, - "query": { "type": ["string", "number"] }, - "query_range": { "type": ["string", "number"] }, - "step": { "type": "number" } - } -} diff --git a/lib/gitlab/metrics/dashboard/validator/schemas/panel.json b/lib/gitlab/metrics/dashboard/validator/schemas/panel.json deleted file mode 100644 index 2ae9608036e..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/schemas/panel.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "object", - "required": ["title", "metrics"], - "properties": { - "type": { - "type": "string", - "enum": ["area-chart", "line-chart", "anomaly-chart", "bar", "column", "stacked-column", "single-stat", "heatmap", "gauge"], - "default": "area-chart" - }, - "title": { "type": "string" }, - "y_label": { "type": "string" }, - "y_axis": { "$ref": "./axis.json" }, - "max_value": { "type": "number" }, - "weight": { "type": "number" }, - "metrics": { - "type": "array", - "items": { 
"$ref": "./metric.json" } - }, - "links": { - "type": "array", - "items": { "$ref": "./link.json" } - } - } -} diff --git a/lib/gitlab/metrics/dashboard/validator/schemas/panel_group.json b/lib/gitlab/metrics/dashboard/validator/schemas/panel_group.json deleted file mode 100644 index 1306fc475db..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/schemas/panel_group.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "object", - "required": ["group", "panels"], - "properties": { - "group": { "type": "string" }, - "priority": { "type": "number" }, - "panels": { - "type": "array", - "items": { "$ref": "./panel.json" } - } - } -} diff --git a/lib/gitlab/metrics/dashboard/validator/schemas/templating.json b/lib/gitlab/metrics/dashboard/validator/schemas/templating.json deleted file mode 100644 index 6f8664c89af..00000000000 --- a/lib/gitlab/metrics/dashboard/validator/schemas/templating.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "type": "object", - "required": ["variables"], - "properties": { - "variables": { "type": "object" } - } -} diff --git a/lib/gitlab/sidekiq_middleware/server_metrics.rb b/lib/gitlab/sidekiq_middleware/server_metrics.rb index e7c6b586da4..a8b3683e09f 100644 --- a/lib/gitlab/sidekiq_middleware/server_metrics.rb +++ b/lib/gitlab/sidekiq_middleware/server_metrics.rb @@ -11,9 +11,9 @@ module Gitlab # most of the durations for cpu, gitaly, db and elasticsearch SIDEKIQ_LATENCY_BUCKETS = [0.1, 0.5, 1, 2.5].freeze - # These are the buckets we currently use for alerting, we will likely - # replace these histograms with Application SLIs - # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1313 + # These buckets are only available on self-managed. + # We have replaced with Application SLIs on GitLab.com. 
+ # https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/700 SIDEKIQ_JOB_DURATION_BUCKETS = [10, 300].freeze SIDEKIQ_QUEUE_DURATION_BUCKETS = [10, 60].freeze @@ -24,15 +24,12 @@ module Gitlab include ::Gitlab::SidekiqMiddleware::MetricsHelper def metrics - { + metrics = { sidekiq_jobs_cpu_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_cpu_seconds, 'Seconds this Sidekiq job spent on the CPU', {}, SIDEKIQ_LATENCY_BUCKETS), - sidekiq_jobs_completion_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_completion_seconds, 'Seconds to complete Sidekiq job', {}, SIDEKIQ_JOB_DURATION_BUCKETS), sidekiq_jobs_db_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_db_seconds, 'Seconds of database time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS), sidekiq_jobs_gitaly_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_gitaly_seconds, 'Seconds of Gitaly time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS), - sidekiq_jobs_queue_duration_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_queue_duration_seconds, 'Duration in seconds that a Sidekiq job was queued before being executed', {}, SIDEKIQ_QUEUE_DURATION_BUCKETS), sidekiq_redis_requests_duration_seconds: ::Gitlab::Metrics.histogram(:sidekiq_redis_requests_duration_seconds, 'Duration in seconds that a Sidekiq job spent requests a Redis server', {}, Gitlab::Instrumentation::Redis::QUERY_TIME_BUCKETS), sidekiq_elasticsearch_requests_duration_seconds: ::Gitlab::Metrics.histogram(:sidekiq_elasticsearch_requests_duration_seconds, 'Duration in seconds that a Sidekiq job spent in requests to an Elasticsearch server', {}, SIDEKIQ_LATENCY_BUCKETS), - sidekiq_jobs_failed_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_failed_total, 'Sidekiq jobs failed'), sidekiq_jobs_retried_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_retried_total, 'Sidekiq jobs retried'), sidekiq_jobs_interrupted_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_interrupted_total, 'Sidekiq jobs interrupted'), sidekiq_redis_requests_total: 
::Gitlab::Metrics.counter(:sidekiq_redis_requests_total, 'Redis requests during a Sidekiq job execution'), @@ -41,6 +38,17 @@ module Gitlab sidekiq_concurrency: ::Gitlab::Metrics.gauge(:sidekiq_concurrency, 'Maximum number of Sidekiq jobs', {}, :all), sidekiq_mem_total_bytes: ::Gitlab::Metrics.gauge(:sidekiq_mem_total_bytes, 'Number of bytes allocated for both objects consuming an object slot and objects that required a malloc', {}, :all) } + + if Feature.enabled?(:emit_sidekiq_histogram_metrics, type: :ops) + metrics[:sidekiq_jobs_completion_seconds] = ::Gitlab::Metrics.histogram(:sidekiq_jobs_completion_seconds, 'Seconds to complete Sidekiq job', {}, SIDEKIQ_JOB_DURATION_BUCKETS) + metrics[:sidekiq_jobs_queue_duration_seconds] = ::Gitlab::Metrics.histogram(:sidekiq_jobs_queue_duration_seconds, 'Duration in seconds that a Sidekiq job was queued before being executed', {}, SIDEKIQ_QUEUE_DURATION_BUCKETS) + metrics[:sidekiq_jobs_failed_total] = ::Gitlab::Metrics.counter(:sidekiq_jobs_failed_total, 'Sidekiq jobs failed') + else + # The sum metric is still used in GitLab.com for dashboards + metrics[:sidekiq_jobs_completion_seconds_sum] = ::Gitlab::Metrics.counter(:sidekiq_jobs_completion_seconds_sum, 'Total of seconds to complete Sidekiq job') + end + + metrics end def initialize_process_metrics @@ -59,6 +67,8 @@ module Gitlab base_labels = create_labels(worker_class, queue, {}) possible_sli_labels << base_labels.slice(*SIDEKIQ_SLI_LABELS) + next unless Feature.enabled?(:emit_sidekiq_histogram_metrics, type: :ops) + %w[done fail].each do |status| metrics[:sidekiq_jobs_completion_seconds].get(base_labels.merge(job_status: status)) end @@ -92,7 +102,8 @@ module Gitlab def instrument(job, labels) queue_duration = ::Gitlab::InstrumentationHelper.queue_duration_for_job(job) - @metrics[:sidekiq_jobs_queue_duration_seconds].observe(labels, queue_duration) if queue_duration + @metrics[:sidekiq_jobs_queue_duration_seconds]&.observe(labels, queue_duration) if queue_duration + 
@metrics[:sidekiq_running_jobs].increment(labels, 1) if job['retry_count'].present? @@ -119,13 +130,21 @@ module Gitlab # sidekiq_running_jobs, sidekiq_jobs_failed_total should not include the job_status label @metrics[:sidekiq_running_jobs].increment(labels, -1) - @metrics[:sidekiq_jobs_failed_total].increment(labels, 1) unless job_succeeded + + if Feature.enabled?(:emit_sidekiq_histogram_metrics, type: :ops) + @metrics[:sidekiq_jobs_failed_total].increment(labels, 1) unless job_succeeded + else + # we don't need job_status label here + @metrics[:sidekiq_jobs_completion_seconds_sum].increment(labels, monotonic_time) + end # job_status: done, fail match the job_status attribute in structured logging labels[:job_status] = job_succeeded ? "done" : "fail" instrumentation = job[:instrumentation] || {} @metrics[:sidekiq_jobs_cpu_seconds].observe(labels, job_thread_cputime) - @metrics[:sidekiq_jobs_completion_seconds].observe(labels, monotonic_time) + + @metrics[:sidekiq_jobs_completion_seconds]&.observe(labels, monotonic_time) + @metrics[:sidekiq_jobs_db_seconds].observe(labels, ActiveRecord::LogSubscriber.runtime / 1000) @metrics[:sidekiq_jobs_gitaly_seconds].observe(labels, get_gitaly_time(instrumentation)) @metrics[:sidekiq_redis_requests_total].increment(labels, get_redis_calls(instrumentation)) diff --git a/lib/gitlab/usage_data_counters/editor_unique_counter.rb b/lib/gitlab/usage_data_counters/editor_unique_counter.rb index 4e4a01ed301..7955c19b7e6 100644 --- a/lib/gitlab/usage_data_counters/editor_unique_counter.rb +++ b/lib/gitlab/usage_data_counters/editor_unique_counter.rb @@ -9,24 +9,24 @@ module Gitlab EDIT_CATEGORY = 'ide_edit' class << self - def track_web_ide_edit_action(author:, time: Time.zone.now, project:) - track_unique_action(EDIT_BY_WEB_IDE, author, time, project) + def track_web_ide_edit_action(author:, project:) + track_internal_event(EDIT_BY_WEB_IDE, author, project) end def count_web_ide_edit_actions(date_from:, date_to:) 
count_unique(EDIT_BY_WEB_IDE, date_from, date_to) end - def track_sfe_edit_action(author:, time: Time.zone.now, project:) - track_unique_action(EDIT_BY_SFE, author, time, project) + def track_sfe_edit_action(author:, project:) + track_internal_event(EDIT_BY_SFE, author, project) end def count_sfe_edit_actions(date_from:, date_to:) count_unique(EDIT_BY_SFE, date_from, date_to) end - def track_snippet_editor_edit_action(author:, time: Time.zone.now, project:) - track_unique_action(EDIT_BY_SNIPPET_EDITOR, author, time, project) + def track_snippet_editor_edit_action(author:, project:) + track_internal_event(EDIT_BY_SNIPPET_EDITOR, author, project) end def count_snippet_editor_edit_actions(date_from:, date_to:) @@ -35,21 +35,15 @@ module Gitlab private - def track_unique_action(event_name, author, time, project = nil) + def track_internal_event(event_name, author, project = nil) return unless author - Gitlab::Tracking.event( - name, - 'ide_edit', - property: event_name.to_s, - project: project, - namespace: project&.namespace, + Gitlab::InternalEvents.track_event( + event_name, user: author, - label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit', - context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context] + project: project, + namespace: project&.namespace ) - - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(event_name, values: author.id, time: time) end def count_unique(actions, date_from, date_to) diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 6910926d991..3c93808323d 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -895,9 +895,6 @@ msgstr "" msgid "%{listToShow}, and %{awardsListLength} more" msgstr "" -msgid "%{location} is missing required keys: %{keys}" -msgstr "" - msgid "%{lock_path} is locked by GitLab User %{lock_user_id}" msgstr "" @@ -1334,24 +1331,6 @@ msgstr "" msgid "%{wildcards_link_start}Wildcards%{wildcards_link_end} such as 
%{code_tag_start}v*%{code_tag_end} or %{code_tag_start}*-release%{code_tag_end} are supported." msgstr "" -msgid "'%{data}' at %{location} does not match format: %{format}" -msgstr "" - -msgid "'%{data}' at %{location} does not match pattern: %{pattern}" -msgstr "" - -msgid "'%{data}' at %{location} is invalid: error_type=%{type}" -msgstr "" - -msgid "'%{data}' at %{location} is not of type: %{type}" -msgstr "" - -msgid "'%{data}' at %{location} is not one of: %{enum}" -msgstr "" - -msgid "'%{data}' at %{location} is not: %{const}" -msgstr "" - msgid "'%{level}' is not a valid visibility level" msgstr "" @@ -8181,9 +8160,6 @@ msgstr "" msgid "Both SSH and HTTP(S)" msgstr "" -msgid "Both project and dashboard_path are required" -msgstr "" - msgid "Branch" msgstr "" @@ -8577,6 +8553,9 @@ msgstr "" msgid "BroadcastMessages|Indigo" msgstr "" +msgid "BroadcastMessages|Leave blank to target all group and project pages." +msgstr "" + msgid "BroadcastMessages|Light" msgstr "" @@ -8604,12 +8583,18 @@ msgstr "" msgid "BroadcastMessages|Notification" msgstr "" -msgid "BroadcastMessages|Paths can contain wildcards, like */welcome" +msgid "BroadcastMessages|One or more roles is required." +msgstr "" + +msgid "BroadcastMessages|Paths can contain wildcards, like */welcome." msgstr "" msgid "BroadcastMessages|Red" msgstr "" +msgid "BroadcastMessages|Select at least one role." +msgstr "" + msgid "BroadcastMessages|Show only to users who have specific roles on groups/project pages" msgstr "" @@ -9945,12 +9930,18 @@ msgstr "" msgid "CiCatalog|About this project" msgstr "" +msgid "CiCatalog|Back to the CI/CD Catalog" +msgstr "" + msgid "CiCatalog|CI/CD Catalog" msgstr "" msgid "CiCatalog|CI/CD Catalog resource" msgstr "" +msgid "CiCatalog|Component ID not found, or you do not have permission to access component." +msgstr "" + msgid "CiCatalog|Create a pipeline component repository and make reusing pipeline configurations faster and easier." 
msgstr "" @@ -9969,6 +9960,9 @@ msgstr "" msgid "CiCatalog|Mark project as a CI/CD Catalog resource. %{linkStart}What is the CI/CD Catalog?%{linkEnd}" msgstr "" +msgid "CiCatalog|No component available" +msgstr "" + msgid "CiCatalog|No release available" msgstr "" @@ -13127,6 +13121,39 @@ msgstr "" msgid "ContributionEvent|Removed due to membership expiration from %{resourceParentLink}." msgstr "" +msgid "ContributionEvent|Reopened Epic %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened incident %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened issue %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened key result %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened merge request %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened milestone %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened objective %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened requirement %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened resource." +msgstr "" + +msgid "ContributionEvent|Reopened task %{targetLink} in %{resourceParentLink}." +msgstr "" + +msgid "ContributionEvent|Reopened test case %{targetLink} in %{resourceParentLink}." +msgstr "" + msgid "ContributionEvent|…and %{count} more commits. %{linkStart}Compare%{linkEnd}." 
msgstr "" @@ -55905,9 +55932,6 @@ msgstr[1] "" msgid "mergedCommitsAdded| (commits were squashed)" msgstr "" -msgid "metric_id must be unique across a project" -msgstr "" - msgid "milestone" msgstr "" diff --git a/package.json b/package.json index cc9924cc606..1bf2e454476 100644 --- a/package.json +++ b/package.json @@ -252,7 +252,7 @@ "custom-jquery-matchers": "^2.1.0", "eslint": "8.46.0", "eslint-import-resolver-jest": "3.0.2", - "eslint-import-resolver-webpack": "0.13.2", + "eslint-import-resolver-webpack": "0.13.4", "eslint-plugin-import": "^2.28.0", "eslint-plugin-no-jquery": "2.7.0", "eslint-plugin-no-unsanitized": "^4.0.2", diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb index 5e049e0375b..1c418f646f6 100644 --- a/spec/factories/ci/job_artifacts.rb +++ b/spec/factories/ci/job_artifacts.rb @@ -478,5 +478,15 @@ FactoryBot.define do artifact.file_sha256 = Digest::SHA256.file(artifact.file.path).hexdigest end end + + trait :annotations do + file_type { :annotations } + file_format { :gzip } + + after(:build) do |artifact, evaluator| + artifact.file = fixture_file_upload( + Rails.root.join('spec/fixtures/gl-annotations.json.gz'), 'application/x-gzip') + end + end end end diff --git a/spec/fixtures/gl-annotations.json.gz b/spec/fixtures/gl-annotations.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..493b51f224391e12abd0fdc09cc958186d6b324f GIT binary patch literal 124 zcmV-?0E7P@iwFP!000001FPmzP*6&(C`m2KOU#MS$;`_xRuRD(%$d7v~* z0w|S}n3S3W6jBNe@=-8U(m}|S7Jkkc{Q3 { const findShowInCli = () => wrapper.findComponent('[data-testid=show-in-cli-checkbox]'); const findTargetSelect = () => wrapper.findComponent('[data-testid=target-select]'); const findTargetPath = () => wrapper.findComponent('[data-testid=target-path-input]'); + const emitSubmitForm = () => findForm().vm.$emit('submit', { preventDefault: () => {} }); function createComponent({ broadcastMessage = {} } = {}) { wrapper = mount(MessageForm, { @@ 
-79,7 +80,7 @@ describe('MessageForm', () => { it('renders the placeholder text when the user message is blank', () => { createComponent({ broadcastMessage: { message: ' ' } }); - expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.messagePlaceholder); + expect(wrapper.text()).toContain(MessageForm.i18n.messagePlaceholder); }); }); @@ -129,13 +130,18 @@ describe('MessageForm', () => { it('triggers displaying target path and target roles when selecting different options', async () => { createComponent(); + const targetPath = findTargetPath(); const options = findTargetSelect().findAll('option'); await options.at(1).setSelected(); - expect(findTargetPath().isVisible()).toBe(true); + expect(targetPath.isVisible()).toBe(true); + expect(targetPath.text()).toContain(MessageForm.i18n.targetPathDescription); + expect(targetPath.text()).not.toContain(MessageForm.i18n.targetPathWithRolesReminder); expect(findTargetRoles().isVisible()).toBe(false); await options.at(2).setSelected(); - expect(findTargetPath().isVisible()).toBe(true); + expect(targetPath.isVisible()).toBe(true); + expect(targetPath.text()).toContain(MessageForm.i18n.targetPathDescription); + expect(targetPath.text()).toContain(MessageForm.i18n.targetPathWithRolesReminder); expect(findTargetRoles().isVisible()).toBe(true); }); @@ -157,12 +163,12 @@ describe('MessageForm', () => { describe('form submit button', () => { it('renders the "add" text when the message is not persisted', () => { createComponent({ broadcastMessage: { id: undefined } }); - expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.add); + expect(wrapper.text()).toContain(MessageForm.i18n.add); }); it('renders the "update" text when the message is persisted', () => { createComponent({ broadcastMessage: { id: 100 } }); - expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.update); + expect(wrapper.text()).toContain(MessageForm.i18n.update); }); it('is disabled when the user message is blank', () => { @@ -196,56 +202,86 @@ 
describe('MessageForm', () => { ends_at: defaultProps.endsAt, }; - it('sends a create request for a new message form', async () => { - createComponent({ broadcastMessage: { id: undefined } }); - findForm().vm.$emit('submit', { preventDefault: () => {} }); - await waitForPromises(); + describe('when creating a new message', () => { + beforeEach(() => { + createComponent({ broadcastMessage: { id: undefined } }); + }); - expect(axiosMock.history.post).toHaveLength(2); - expect(axiosMock.history.post[1]).toMatchObject({ - url: messagesPath, - data: JSON.stringify(defaultPayload), + it('sends a create request for a new message form', async () => { + emitSubmitForm(); + await waitForPromises(); + + expect(axiosMock.history.post).toHaveLength(2); + expect(axiosMock.history.post[1]).toMatchObject({ + url: messagesPath, + data: JSON.stringify(defaultPayload), + }); + }); + + it('shows an error alert if the create request fails', async () => { + axiosMock.onPost(messagesPath).replyOnce(HTTP_STATUS_BAD_REQUEST); + emitSubmitForm(); + await waitForPromises(); + + expect(createAlert).toHaveBeenCalledWith( + expect.objectContaining({ + message: MessageForm.i18n.addError, + }), + ); }); }); - it('shows an error alert if the create request fails', async () => { - createComponent({ broadcastMessage: { id: undefined } }); - axiosMock.onPost(messagesPath).replyOnce(HTTP_STATUS_BAD_REQUEST); - findForm().vm.$emit('submit', { preventDefault: () => {} }); - await waitForPromises(); + describe('when editing an existing message', () => { + const mockId = 1337; - expect(createAlert).toHaveBeenCalledWith( - expect.objectContaining({ - message: wrapper.vm.$options.i18n.addError, - }), - ); - }); - - it('sends an update request for a persisted message form', async () => { - const id = 1337; - createComponent({ broadcastMessage: { id } }); - findForm().vm.$emit('submit', { preventDefault: () => {} }); - await waitForPromises(); - - expect(axiosMock.history.patch).toHaveLength(1); - 
expect(axiosMock.history.patch[0]).toMatchObject({ - url: `${messagesPath}/${id}`, - data: JSON.stringify(defaultPayload), + beforeEach(() => { + createComponent({ broadcastMessage: { id: mockId } }); }); - }); - it('shows an error alert if the update request fails', async () => { - const id = 1337; - createComponent({ broadcastMessage: { id } }); - axiosMock.onPost(`${messagesPath}/${id}`).replyOnce(HTTP_STATUS_BAD_REQUEST); - findForm().vm.$emit('submit', { preventDefault: () => {} }); - await waitForPromises(); + it('sends an update request for a persisted message form', async () => { + emitSubmitForm(); + await waitForPromises(); - expect(createAlert).toHaveBeenCalledWith( - expect.objectContaining({ - message: wrapper.vm.$options.i18n.updateError, - }), - ); + expect(axiosMock.history.patch).toHaveLength(1); + expect(axiosMock.history.patch[0]).toMatchObject({ + url: `${messagesPath}/${mockId}`, + data: JSON.stringify(defaultPayload), + }); + }); + + it('shows an error alert if the update request fails', async () => { + axiosMock.onPost(`${messagesPath}/${mockId}`).replyOnce(HTTP_STATUS_BAD_REQUEST); + emitSubmitForm(); + await waitForPromises(); + + expect(createAlert).toHaveBeenCalledWith( + expect.objectContaining({ + message: MessageForm.i18n.updateError, + }), + ); + }); + + it('does not submit if target roles is required, and later does submit when validation is corrected', async () => { + const options = findTargetSelect().findAll('option'); + await options.at(2).setSelected(); + + emitSubmitForm(); + await waitForPromises(); + + expect(axiosMock.history.patch).toHaveLength(0); + expect(wrapper.text()).toContain(MessageForm.i18n.targetRolesValidationMsg); + + await findTargetRoles().find('input[type="checkbox"]').setChecked(); + + emitSubmitForm(); + await waitForPromises(); + + expect(axiosMock.history.patch).toHaveLength(1); + expect(axiosMock.history.patch[0]).toMatchObject({ + url: `${messagesPath}/${mockId}`, + data: JSON.stringify({ 
...defaultPayload, target_access_levels: [10] }), + }); + }); }); }); }); diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_reopened_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_reopened_spec.js new file mode 100644 index 00000000000..87f3080a98f --- /dev/null +++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_reopened_spec.js @@ -0,0 +1,60 @@ +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import ContributionEventReopened from '~/contribution_events/components/contribution_event/contribution_event_reopened.vue'; +import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue'; +import { TARGET_TYPE_WORK_ITEM } from '~/contribution_events/constants'; +import { + eventMilestoneReopened, + eventIssueReopened, + eventMergeRequestReopened, + eventTaskReopened, + eventIncidentReopened, +} from '../../utils'; + +describe('ContributionEventReopened', () => { + let wrapper; + + const createComponent = ({ propsData }) => { + wrapper = shallowMountExtended(ContributionEventReopened, { + propsData, + }); + }; + + describe.each` + event | expectedMessage | iconName + ${eventMilestoneReopened()} | ${'Reopened milestone %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} + ${eventIssueReopened()} | ${'Reopened issue %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} + ${eventMergeRequestReopened()} | ${'Reopened merge request %{targetLink} in %{resourceParentLink}.'} | ${'merge-request-open'} + ${{ target: { type: 'unsupported type' } }} | ${'Reopened resource.'} | ${'status_open'} + `('when event target type is $event.target.type', ({ event, expectedMessage, iconName }) => { + it('renders `ContributionEventBase` with correct props', () => { + createComponent({ propsData: { event } }); + + 
expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({ + event, + message: expectedMessage, + iconName, + iconClass: 'gl-text-green-500', + }); + }); + }); + + describe(`when event target type is ${TARGET_TYPE_WORK_ITEM}`, () => { + describe.each` + event | expectedMessage + ${eventTaskReopened()} | ${'Reopened task %{targetLink} in %{resourceParentLink}.'} + ${eventIncidentReopened()} | ${'Reopened incident %{targetLink} in %{resourceParentLink}.'} + ${{ target: { type: TARGET_TYPE_WORK_ITEM, issue_type: 'unsupported type' } }} | ${'Reopened resource.'} + `('when issue type is $event.target.issue_type', ({ event, expectedMessage }) => { + it('renders `ContributionEventBase` with correct props', () => { + createComponent({ propsData: { event } }); + + expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({ + event, + message: expectedMessage, + iconName: 'status_open', + iconClass: 'gl-text-green-500', + }); + }); + }); + }); +}); diff --git a/spec/frontend/contribution_events/components/contribution_events_spec.js b/spec/frontend/contribution_events/components/contribution_events_spec.js index d8fecdb5e8b..8722eeca4b4 100644 --- a/spec/frontend/contribution_events/components/contribution_events_spec.js +++ b/spec/frontend/contribution_events/components/contribution_events_spec.js @@ -9,6 +9,7 @@ import ContributionEventPrivate from '~/contribution_events/components/contribut import ContributionEventMerged from '~/contribution_events/components/contribution_event/contribution_event_merged.vue'; import ContributionEventCreated from '~/contribution_events/components/contribution_event/contribution_event_created.vue'; import ContributionEventClosed from '~/contribution_events/components/contribution_event/contribution_event_closed.vue'; +import ContributionEventReopened from '~/contribution_events/components/contribution_event/contribution_event_reopened.vue'; import { eventApproved, eventExpired, @@ -19,6 +20,7 @@ import { 
eventMerged, eventCreated, eventClosed, + eventReopened, } from '../utils'; describe('ContributionEvents', () => { @@ -37,6 +39,7 @@ describe('ContributionEvents', () => { eventMerged(), eventCreated(), eventClosed(), + eventReopened(), ], }, }); @@ -53,6 +56,7 @@ describe('ContributionEvents', () => { ${ContributionEventMerged} | ${eventMerged()} ${ContributionEventCreated} | ${eventCreated()} ${ContributionEventClosed} | ${eventClosed()} + ${ContributionEventReopened} | ${eventReopened()} `( 'renders `$expectedComponent.name` component and passes expected event', ({ expectedComponent, expectedEvent }) => { diff --git a/spec/frontend/contribution_events/utils.js b/spec/frontend/contribution_events/utils.js index dd92f6c317a..e24c81d9256 100644 --- a/spec/frontend/contribution_events/utils.js +++ b/spec/frontend/contribution_events/utils.js @@ -8,6 +8,7 @@ import { EVENT_TYPE_PRIVATE, EVENT_TYPE_MERGED, EVENT_TYPE_CLOSED, + EVENT_TYPE_REOPENED, PUSH_EVENT_REF_TYPE_BRANCH, PUSH_EVENT_REF_TYPE_TAG, EVENT_TYPE_CREATED, @@ -16,7 +17,7 @@ import { TARGET_TYPE_MERGE_REQUEST, TARGET_TYPE_WIKI, TARGET_TYPE_DESIGN, - TARGET_TYPE_WORK_ITEM, + WORK_ITEM_ISSUE_TYPE_ISSUE, WORK_ITEM_ISSUE_TYPE_TASK, WORK_ITEM_ISSUE_TYPE_INCIDENT, } from '~/contribution_events/constants'; @@ -25,12 +26,7 @@ const findEventByAction = (action) => () => events.find((event) => event.action const findEventByActionAndTargetType = (action, targetType) => () => events.find((event) => event.action === action && event.target?.type === targetType); const findEventByActionAndIssueType = (action, issueType) => () => - events.find( - (event) => - event.action === action && - event.target?.type === TARGET_TYPE_WORK_ITEM && - event.target.issue_type === issueType, - ); + events.find((event) => event.action === action && event.target.issue_type === issueType); export const eventApproved = findEventByAction(EVENT_TYPE_APPROVED); @@ -100,3 +96,18 @@ export const eventWikiPageClosed = 
findClosedEvent(TARGET_TYPE_WIKI); export const eventDesignClosed = findClosedEvent(TARGET_TYPE_DESIGN); export const eventTaskClosed = findWorkItemClosedEvent(WORK_ITEM_ISSUE_TYPE_TASK); export const eventIncidentClosed = findWorkItemClosedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT); + +export const eventReopened = findEventByAction(EVENT_TYPE_REOPENED); + +export const findReopenedEvent = (targetType) => + findEventByActionAndTargetType(EVENT_TYPE_REOPENED, targetType); +export const findWorkItemReopenedEvent = (issueType) => + findEventByActionAndIssueType(EVENT_TYPE_REOPENED, issueType); + +export const eventMilestoneReopened = findReopenedEvent(TARGET_TYPE_MILESTONE); +export const eventMergeRequestReopened = findReopenedEvent(TARGET_TYPE_MERGE_REQUEST); +export const eventWikiPageReopened = findReopenedEvent(TARGET_TYPE_WIKI); +export const eventDesignReopened = findReopenedEvent(TARGET_TYPE_DESIGN); +export const eventIssueReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_ISSUE); +export const eventTaskReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_TASK); +export const eventIncidentReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT); diff --git a/spec/frontend/contribution_events/utils_spec.js b/spec/frontend/contribution_events/utils_spec.js new file mode 100644 index 00000000000..298f5ae652a --- /dev/null +++ b/spec/frontend/contribution_events/utils_spec.js @@ -0,0 +1,24 @@ +import { TARGET_TYPE_MILESTONE, WORK_ITEM_ISSUE_TYPE_TASK } from '~/contribution_events/constants'; +import { getValueByEventTarget } from '~/contribution_events/utils'; +import { eventMilestoneCreated, eventTaskCreated } from './utils'; + +describe('getValueByEventTarget', () => { + const milestoneValue = 'milestone'; + const taskValue = 'task'; + const fallbackValue = 'fallback'; + + const map = { + [TARGET_TYPE_MILESTONE]: milestoneValue, + [WORK_ITEM_ISSUE_TYPE_TASK]: taskValue, + fallback: fallbackValue, + }; + + it.each` + event | expected + 
${eventMilestoneCreated()} | ${milestoneValue} + ${eventTaskCreated()} | ${taskValue} + ${{ target: { type: 'unsupported type' } }} | ${fallbackValue} + `('returns $expected when event is $event', ({ event, expected }) => { + expect(getValueByEventTarget(map, event)).toBe(expected); + }); +}); diff --git a/spec/frontend/environments/edit_environment_spec.js b/spec/frontend/environments/edit_environment_spec.js index 93fe9ed9400..b8bb76f95c9 100644 --- a/spec/frontend/environments/edit_environment_spec.js +++ b/spec/frontend/environments/edit_environment_spec.js @@ -7,7 +7,6 @@ import EditEnvironment from '~/environments/components/edit_environment.vue'; import { createAlert } from '~/alert'; import { visitUrl } from '~/lib/utils/url_utility'; import getEnvironment from '~/environments/graphql/queries/environment.query.graphql'; -import getEnvironmentWithNamespace from '~/environments/graphql/queries/environment_with_namespace.graphql'; import updateEnvironment from '~/environments/graphql/mutations/update_environment.mutation.graphql'; import { __ } from '~/locale'; import createMockApollo from '../__helpers__/mock_apollo_helper'; @@ -43,9 +42,6 @@ describe('~/environments/components/edit.vue', () => { let wrapper; const getEnvironmentQuery = jest.fn().mockResolvedValue({ data: resolvedEnvironment }); - const getEnvironmentWithNamespaceQuery = jest - .fn() - .mockResolvedValue({ data: resolvedEnvironment }); const updateEnvironmentSuccess = jest .fn() @@ -59,24 +55,17 @@ describe('~/environments/components/edit.vue', () => { const mocks = [ [getEnvironment, getEnvironmentQuery], - [getEnvironmentWithNamespace, getEnvironmentWithNamespaceQuery], [updateEnvironment, mutationHandler], ]; return createMockApollo(mocks); }; - const createWrapperWithApollo = async ({ - mutationHandler = updateEnvironmentSuccess, - kubernetesNamespaceForEnvironment = false, - } = {}) => { + const createWrapperWithApollo = async ({ mutationHandler = updateEnvironmentSuccess } = {}) => { 
wrapper = mountExtended(EditEnvironment, { propsData: { environment: {} }, provide: { ...provide, - glFeatures: { - kubernetesNamespaceForEnvironment, - }, }, apolloProvider: createMockApolloProvider(mutationHandler), }); @@ -169,11 +158,4 @@ describe('~/environments/components/edit.vue', () => { }); }); }); - - describe('when `kubernetesNamespaceForEnvironment` is enabled', () => { - it('calls the `getEnvironmentWithNamespace` query', () => { - createWrapperWithApollo({ kubernetesNamespaceForEnvironment: true }); - expect(getEnvironmentWithNamespaceQuery).toHaveBeenCalled(); - }); - }); }); diff --git a/spec/frontend/environments/environment_form_spec.js b/spec/frontend/environments/environment_form_spec.js index 803207bcce8..9cc05d577ac 100644 --- a/spec/frontend/environments/environment_form_spec.js +++ b/spec/frontend/environments/environment_form_spec.js @@ -42,11 +42,7 @@ describe('~/environments/components/form.vue', () => { }, }); - const createWrapperWithApollo = ({ - propsData = {}, - kubernetesNamespaceForEnvironment = false, - queryResult = null, - } = {}) => { + const createWrapperWithApollo = ({ propsData = {}, queryResult = null } = {}) => { Vue.use(VueApollo); const requestHandlers = [ @@ -72,9 +68,6 @@ describe('~/environments/components/form.vue', () => { return mountExtended(EnvironmentForm, { provide: { ...PROVIDE, - glFeatures: { - kubernetesNamespaceForEnvironment, - }, }, propsData: { ...DEFAULT_PROPS, @@ -296,127 +289,117 @@ describe('~/environments/components/form.vue', () => { }); describe('namespace selector', () => { - it("doesn't render namespace selector if `kubernetesNamespaceForEnvironment` feature flag is disabled", () => { + beforeEach(() => { wrapper = createWrapperWithApollo(); + }); + + it("doesn't render namespace selector by default", () => { expect(findNamespaceSelector().exists()).toBe(false); }); - describe('when `kubernetesNamespaceForEnvironment` feature flag is enabled', () => { - beforeEach(() => { - wrapper = 
createWrapperWithApollo({ - kubernetesNamespaceForEnvironment: true, - }); + describe('when the agent was selected', () => { + beforeEach(async () => { + await selectAgent(); }); - it("doesn't render namespace selector by default", () => { + it('renders namespace selector', () => { + expect(findNamespaceSelector().exists()).toBe(true); + }); + + it('requests the kubernetes namespaces with the correct configuration', async () => { + const configuration = { + basePath: mockKasTunnelUrl.replace(/\/$/, ''), + baseOptions: { + headers: { + 'GitLab-Agent-Id': 2, + }, + withCredentials: true, + }, + }; + + await waitForPromises(); + + expect(getNamespacesQueryResult).toHaveBeenCalledWith( + {}, + { configuration }, + expect.anything(), + expect.anything(), + ); + }); + + it('sets the loading prop while fetching the list', async () => { + expect(findNamespaceSelector().props('loading')).toBe(true); + + await waitForPromises(); + + expect(findNamespaceSelector().props('loading')).toBe(false); + }); + + it('renders a list of available namespaces', async () => { + await waitForPromises(); + + expect(findNamespaceSelector().props('items')).toEqual([ + { text: 'default', value: 'default' }, + { text: 'agent', value: 'agent' }, + ]); + }); + + it('filters the namespaces list on user search', async () => { + await waitForPromises(); + await findNamespaceSelector().vm.$emit('search', 'default'); + + expect(findNamespaceSelector().props('items')).toEqual([ + { value: 'default', text: 'default' }, + ]); + }); + + it('updates namespace selector field with the name of selected namespace', async () => { + await waitForPromises(); + await findNamespaceSelector().vm.$emit('select', 'agent'); + + expect(findNamespaceSelector().props('toggleText')).toBe('agent'); + }); + + it('emits changes to the kubernetesNamespace', async () => { + await waitForPromises(); + await findNamespaceSelector().vm.$emit('select', 'agent'); + + expect(wrapper.emitted('change')[1]).toEqual([ + { name: '', 
externalUrl: '', kubernetesNamespace: 'agent' }, + ]); + }); + + it('clears namespace selector when another agent was selected', async () => { + await waitForPromises(); + await findNamespaceSelector().vm.$emit('select', 'agent'); + + expect(findNamespaceSelector().props('toggleText')).toBe('agent'); + + await findAgentSelector().vm.$emit('select', '1'); + expect(findNamespaceSelector().props('toggleText')).toBe( + EnvironmentForm.i18n.namespaceHelpText, + ); + }); + }); + + describe('when cannot connect to the cluster', () => { + const error = new Error('Error from the cluster_client API'); + + beforeEach(async () => { + wrapper = createWrapperWithApollo({ + queryResult: jest.fn().mockRejectedValueOnce(error), + }); + + await selectAgent(); + await waitForPromises(); + }); + + it("doesn't render the namespace selector", () => { expect(findNamespaceSelector().exists()).toBe(false); }); - describe('when the agent was selected', () => { - beforeEach(async () => { - await selectAgent(); - }); - - it('renders namespace selector', () => { - expect(findNamespaceSelector().exists()).toBe(true); - }); - - it('requests the kubernetes namespaces with the correct configuration', async () => { - const configuration = { - basePath: mockKasTunnelUrl.replace(/\/$/, ''), - baseOptions: { - headers: { - 'GitLab-Agent-Id': 2, - }, - withCredentials: true, - }, - }; - - await waitForPromises(); - - expect(getNamespacesQueryResult).toHaveBeenCalledWith( - {}, - { configuration }, - expect.anything(), - expect.anything(), - ); - }); - - it('sets the loading prop while fetching the list', async () => { - expect(findNamespaceSelector().props('loading')).toBe(true); - - await waitForPromises(); - - expect(findNamespaceSelector().props('loading')).toBe(false); - }); - - it('renders a list of available namespaces', async () => { - await waitForPromises(); - - expect(findNamespaceSelector().props('items')).toEqual([ - { text: 'default', value: 'default' }, - { text: 'agent', value: 'agent' 
}, - ]); - }); - - it('filters the namespaces list on user search', async () => { - await waitForPromises(); - await findNamespaceSelector().vm.$emit('search', 'default'); - - expect(findNamespaceSelector().props('items')).toEqual([ - { value: 'default', text: 'default' }, - ]); - }); - - it('updates namespace selector field with the name of selected namespace', async () => { - await waitForPromises(); - await findNamespaceSelector().vm.$emit('select', 'agent'); - - expect(findNamespaceSelector().props('toggleText')).toBe('agent'); - }); - - it('emits changes to the kubernetesNamespace', async () => { - await waitForPromises(); - await findNamespaceSelector().vm.$emit('select', 'agent'); - - expect(wrapper.emitted('change')[1]).toEqual([ - { name: '', externalUrl: '', kubernetesNamespace: 'agent' }, - ]); - }); - - it('clears namespace selector when another agent was selected', async () => { - await waitForPromises(); - await findNamespaceSelector().vm.$emit('select', 'agent'); - - expect(findNamespaceSelector().props('toggleText')).toBe('agent'); - - await findAgentSelector().vm.$emit('select', '1'); - expect(findNamespaceSelector().props('toggleText')).toBe( - EnvironmentForm.i18n.namespaceHelpText, - ); - }); - }); - - describe('when cannot connect to the cluster', () => { - const error = new Error('Error from the cluster_client API'); - - beforeEach(async () => { - wrapper = createWrapperWithApollo({ - kubernetesNamespaceForEnvironment: true, - queryResult: jest.fn().mockRejectedValueOnce(error), - }); - - await selectAgent(); - await waitForPromises(); - }); - - it("doesn't render the namespace selector", () => { - expect(findNamespaceSelector().exists()).toBe(false); - }); - - it('renders an alert', () => { - expect(findAlert().text()).toBe('Error from the cluster_client API'); - }); + it('renders an alert', () => { + expect(findAlert().text()).toBe('Error from the cluster_client API'); }); }); }); @@ -430,7 +413,6 @@ 
describe('~/environments/components/form.vue', () => { beforeEach(() => { wrapper = createWrapperWithApollo({ propsData: { environment: environmentWithAgent }, - kubernetesNamespaceForEnvironment: true, }); }); @@ -463,7 +445,6 @@ describe('~/environments/components/form.vue', () => { beforeEach(() => { wrapper = createWrapperWithApollo({ propsData: { environment: environmentWithAgentAndNamespace }, - kubernetesNamespaceForEnvironment: true, }); }); diff --git a/spec/frontend/environments/new_environment_item_spec.js b/spec/frontend/environments/new_environment_item_spec.js index 2f319855bca..0b1a4784cfa 100644 --- a/spec/frontend/environments/new_environment_item_spec.js +++ b/spec/frontend/environments/new_environment_item_spec.js @@ -13,7 +13,6 @@ import Deployment from '~/environments/components/deployment.vue'; import DeployBoardWrapper from '~/environments/components/deploy_board_wrapper.vue'; import KubernetesOverview from '~/environments/components/kubernetes_overview.vue'; import getEnvironmentClusterAgent from '~/environments/graphql/queries/environment_cluster_agent.query.graphql'; -import getEnvironmentClusterAgentWithNamespace from '~/environments/graphql/queries/environment_cluster_agent_with_namespace.query.graphql'; import { resolvedEnvironment, rolloutStatus, agent } from './graphql/mock_data'; import { mockKasTunnelUrl } from './mock_data'; @@ -22,7 +21,6 @@ Vue.use(VueApollo); describe('~/environments/components/new_environment_item.vue', () => { let wrapper; let queryResponseHandler; - let queryWithNamespaceResponseHandler; const projectPath = '/1'; @@ -33,27 +31,15 @@ describe('~/environments/components/new_environment_item.vue', () => { id: '1', environment: { id: '1', + kubernetesNamespace: 'default', clusterAgent, }, }, }, }; queryResponseHandler = jest.fn().mockResolvedValue(response); - queryWithNamespaceResponseHandler = jest.fn().mockResolvedValue({ - data: { - project: { - id: response.data.project.id, - environment: { - 
...response.data.project.environment, - kubernetesNamespace: 'default', - }, - }, - }, - }); - return createMockApollo([ - [getEnvironmentClusterAgent, queryResponseHandler], - [getEnvironmentClusterAgentWithNamespace, queryWithNamespaceResponseHandler], - ]); + + return createMockApollo([[getEnvironmentClusterAgent, queryResponseHandler]]); }; const createWrapper = ({ propsData = {}, provideData = {}, apolloProvider } = {}) => @@ -548,25 +534,6 @@ describe('~/environments/components/new_environment_item.vue', () => { }); }); - it('should request agent data with kubernetes namespace when `kubernetesNamespaceForEnvironment` feature flag is enabled', async () => { - wrapper = createWrapper({ - propsData: { environment: resolvedEnvironment }, - provideData: { - glFeatures: { - kubernetesNamespaceForEnvironment: true, - }, - }, - apolloProvider: createApolloProvider(agent), - }); - - await expandCollapsedSection(); - - expect(queryWithNamespaceResponseHandler).toHaveBeenCalledWith({ - environmentName: resolvedEnvironment.name, - projectFullPath: projectPath, - }); - }); - it('should render if the environment has an agent associated', async () => { wrapper = createWrapper({ propsData: { environment: resolvedEnvironment }, @@ -579,26 +546,6 @@ describe('~/environments/components/new_environment_item.vue', () => { expect(findKubernetesOverview().props()).toMatchObject({ clusterAgent: agent, environmentName: resolvedEnvironment.name, - }); - }); - - it('should render with the namespace if `kubernetesNamespaceForEnvironment` feature flag is enabled and the environment has an agent associated', async () => { - wrapper = createWrapper({ - propsData: { environment: resolvedEnvironment }, - provideData: { - glFeatures: { - kubernetesNamespaceForEnvironment: true, - }, - }, - apolloProvider: createApolloProvider(agent), - }); - - await expandCollapsedSection(); - await waitForPromises(); - - expect(findKubernetesOverview().props()).toEqual({ - clusterAgent: agent, - 
environmentName: resolvedEnvironment.name, namespace: 'default', }); }); diff --git a/spec/frontend/fixtures/users.rb b/spec/frontend/fixtures/users.rb index 24e424b0b72..4bdf9160083 100644 --- a/spec/frontend/fixtures/users.rb +++ b/spec/frontend/fixtures/users.rb @@ -44,7 +44,7 @@ RSpec.describe 'Users (JavaScript fixtures)', feature_category: :user_profile do end it 'controller/users/activity.json' do - get :activity, params: { username: user.username, limit: 50 }, format: :json + get :activity, params: { username: user.username, limit: 100 }, format: :json expect(response).to be_successful end diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js index 6b711b6b6b2..431ede17954 100644 --- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js +++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js @@ -7,15 +7,22 @@ import LineHighlighter from '~/blob/line_highlighter'; import addBlobLinksTracking from '~/blob/blob_links_tracking'; import { BLOB_DATA_MOCK, CHUNK_1, CHUNK_2, LANGUAGE_MOCK } from './mock_data'; -jest.mock('~/blob/line_highlighter'); +const lineHighlighter = new LineHighlighter(); +jest.mock('~/blob/line_highlighter', () => + jest.fn().mockReturnValue({ + highlightHash: jest.fn(), + }), +); jest.mock('~/blob/blob_links_tracking'); describe('Source Viewer component', () => { let wrapper; const CHUNKS_MOCK = [CHUNK_1, CHUNK_2]; + const hash = '#L142'; const createComponent = () => { wrapper = shallowMountExtended(SourceViewer, { + mocks: { $route: { hash } }, propsData: { blob: BLOB_DATA_MOCK, chunks: CHUNKS_MOCK }, }); }; @@ -48,4 +55,10 @@ describe('Source Viewer component', () => { expect(findChunks().at(1).props()).toMatchObject(CHUNK_2); }); }); + + describe('hash highlighting', () => { + it('calls highlightHash with expected parameter', () => { + 
expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash); + }); + }); }); diff --git a/spec/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs_spec.rb b/spec/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs_spec.rb index 7aae3f59a4b..bea0c02cfb8 100644 --- a/spec/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs_spec.rb +++ b/spec/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs, feature_category: :source_code_management do let_it_be(:project) { create(:project, :small_repo) } + let(:repository) { project.repository } let(:file_size_limit) { 1 } let(:any_quarantined_blobs) do described_class.new( @@ -37,7 +38,7 @@ RSpec.describe Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBl end before do - allow(Gitlab::Git::HookEnv).to receive(:all).with(project.repository.gl_repository).and_return(git_env) + allow(Gitlab::Git::HookEnv).to receive(:all).with(repository.gl_repository).and_return(git_env) end it 'returns an emtpy array' do @@ -47,9 +48,25 @@ RSpec.describe Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBl context 'when the file is over the limit' do let(:file_size_limit) { 0 } - it 'returns an array with the blobs that are over the limit' do - expect(subject.size).to eq(1) - expect(subject.first).to be_kind_of(Gitlab::Git::Blob) + context 'when the blob does not exist in the repo' do + before do + allow(repository.gitaly_commit_client).to receive(:object_existence_map).and_return(Hash.new { false }) + end + + it 'returns an array with the blobs that are over the limit' do + expect(subject.size).to eq(1) + expect(subject.first).to be_kind_of(Gitlab::Git::Blob) + end + end + + context 'when the blob exists in the repo' do + before do + allow(repository.gitaly_commit_client).to 
receive(:object_existence_map).and_return(Hash.new { true }) + end + + it 'filters out the blobs in the repo' do + expect(subject).to eq([]) + end end end end diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb index 73bf2d422b7..d610c3ce2f6 100644 --- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb @@ -48,6 +48,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports, feature_category: :pipeline_c :terraform | 'tfplan.json' :accessibility | 'gl-accessibility.json' :cyclonedx | 'gl-sbom.cdx.zip' + :annotations | 'gl-annotations.json' end with_them do diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 6d01f480cd0..e24200fc8d6 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -433,6 +433,7 @@ builds: - dast_scanner_profiles_build - dast_scanner_profile - job_annotations +- job_artifacts_annotations bridges: - user - pipeline diff --git a/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb b/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb deleted file mode 100644 index 3af8b51c889..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb +++ /dev/null @@ -1,99 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Transformers::Yml::V1::PrometheusMetrics do - include MetricsDashboardHelpers - - describe '#execute' do - subject { described_class.new(dashboard_hash) } - - context 'valid dashboard' do - let_it_be(:dashboard_hash) do - { - panel_groups: [{ - panels: [ - { - title: 'Panel 1 title', - y_label: 'Panel 1 y_label', - metrics: [ - { - query_range: 'Panel 1 metric 1 query_range', - unit: 'Panel 1 metric 1 unit', - label: 'Panel 1 metric 1 label', - id: 'Panel 1 metric 1 id' - 
}, - { - query: 'Panel 1 metric 2 query', - unit: 'Panel 1 metric 2 unit', - label: 'Panel 1 metric 2 label', - id: 'Panel 1 metric 2 id' - } - ] - }, - { - title: 'Panel 2 title', - y_label: 'Panel 2 y_label', - metrics: [{ - query_range: 'Panel 2 metric 1 query_range', - unit: 'Panel 2 metric 1 unit', - label: 'Panel 2 metric 1 label', - id: 'Panel 2 metric 1 id' - }] - } - ] - }] - } - end - - let(:expected_metrics) do - [ - { - title: 'Panel 1 title', - y_label: 'Panel 1 y_label', - query: "Panel 1 metric 1 query_range", - unit: 'Panel 1 metric 1 unit', - legend: 'Panel 1 metric 1 label', - identifier: 'Panel 1 metric 1 id', - group: 3, - common: false - }, - { - title: 'Panel 1 title', - y_label: 'Panel 1 y_label', - query: 'Panel 1 metric 2 query', - unit: 'Panel 1 metric 2 unit', - legend: 'Panel 1 metric 2 label', - identifier: 'Panel 1 metric 2 id', - group: 3, - common: false - }, - { - title: 'Panel 2 title', - y_label: 'Panel 2 y_label', - query: 'Panel 2 metric 1 query_range', - unit: 'Panel 2 metric 1 unit', - legend: 'Panel 2 metric 1 label', - identifier: 'Panel 2 metric 1 id', - group: 3, - common: false - } - ] - end - - it 'returns collection of metrics with correct attributes' do - expect(subject.execute).to match_array(expected_metrics) - end - end - - context 'invalid dashboard' do - let(:dashboard_hash) { {} } - - it 'raises missing attribute error' do - expect { subject.execute }.to raise_error( - ::Gitlab::Metrics::Dashboard::Transformers::Errors::MissingAttribute, "Missing attribute: 'panel_groups'" - ) - end - end - end -end diff --git a/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb deleted file mode 100644 index 4b07f9dbbab..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Validator::Client do - include 
MetricsDashboardHelpers - - let_it_be(:schema_path) { 'lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json' } - - subject { described_class.new(dashboard, schema_path) } - - describe '#execute' do - context 'with no validation errors' do - let(:dashboard) { load_sample_dashboard } - - it 'returns empty array' do - expect(subject.execute).to eq([]) - end - end - - context 'with validation errors' do - let(:dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) } - - it 'returns array of error objects' do - expect(subject.execute).to include(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError) - end - end - end -end diff --git a/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb deleted file mode 100644 index 129fb631f3e..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb +++ /dev/null @@ -1,15 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Validator::CustomFormats do - describe '#format_handlers' do - describe 'add_to_metric_id_cache' do - it 'adds data to metric id cache' do - subject.format_handlers['add_to_metric_id_cache'].call('metric_id', '_schema') - - expect(subject.metric_ids_cache).to eq(["metric_id"]) - end - end - end -end diff --git a/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb deleted file mode 100644 index a50c2a506cb..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb +++ /dev/null @@ -1,149 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Validator::Errors do - describe Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError do - context 'empty error hash' do - let(:error_hash) { {} } - - it 'uses default error message' do - 
expect(described_class.new(error_hash).message).to eq('Dashboard failed schema validation') - end - end - - context 'formatted message' do - subject { described_class.new(error_hash).message } - - let(:error_hash) do - { - 'data' => 'property_name', - 'data_pointer' => pointer, - 'type' => type, - 'schema' => 'schema', - 'details' => details - } - end - - context 'for root object' do - let(:pointer) { '' } - - context 'when required keys are missing' do - let(:type) { 'required' } - let(:details) { { 'missing_keys' => ['one'] } } - - it { is_expected.to eq 'root is missing required keys: one' } - end - - context 'when there is type mismatch' do - %w(null string boolean integer number array object).each do |expected_type| - context "on type: #{expected_type}" do - let(:type) { expected_type } - let(:details) { nil } - - it { is_expected.to eq "'property_name' at root is not of type: #{expected_type}" } - end - end - end - end - - context 'for nested object' do - let(:pointer) { '/nested_objects/0' } - - context 'when required keys are missing' do - let(:type) { 'required' } - let(:details) { { 'missing_keys' => ['two'] } } - - it { is_expected.to eq '/nested_objects/0 is missing required keys: two' } - end - - context 'when there is type mismatch' do - %w(null string boolean integer number array object).each do |expected_type| - context "on type: #{expected_type}" do - let(:type) { expected_type } - let(:details) { nil } - - it { is_expected.to eq "'property_name' at /nested_objects/0 is not of type: #{expected_type}" } - end - end - end - - context 'when data does not match pattern' do - let(:type) { 'pattern' } - let(:error_hash) do - { - 'data' => 'property_name', - 'data_pointer' => pointer, - 'type' => type, - 'schema' => { 'pattern' => 'aa.*' } - } - end - - it { is_expected.to eq "'property_name' at /nested_objects/0 does not match pattern: aa.*" } - end - - context 'when data does not match format' do - let(:type) { 'format' } - let(:error_hash) do - { - 
'data' => 'property_name', - 'data_pointer' => pointer, - 'type' => type, - 'schema' => { 'format' => 'date-time' } - } - end - - it { is_expected.to eq "'property_name' at /nested_objects/0 does not match format: date-time" } - end - - context 'when data is not const' do - let(:type) { 'const' } - let(:error_hash) do - { - 'data' => 'property_name', - 'data_pointer' => pointer, - 'type' => type, - 'schema' => { 'const' => 'one' } - } - end - - it { is_expected.to eq "'property_name' at /nested_objects/0 is not: \"one\"" } - end - - context 'when data is not included in enum' do - let(:type) { 'enum' } - let(:error_hash) do - { - 'data' => 'property_name', - 'data_pointer' => pointer, - 'type' => type, - 'schema' => { 'enum' => %w(one two) } - } - end - - it { is_expected.to eq "'property_name' at /nested_objects/0 is not one of: [\"one\", \"two\"]" } - end - - context 'when data is not included in enum' do - let(:type) { 'unknown' } - let(:error_hash) do - { - 'data' => 'property_name', - 'data_pointer' => pointer, - 'type' => type, - 'schema' => 'schema' - } - end - - it { is_expected.to eq "'property_name' at /nested_objects/0 is invalid: error_type=unknown" } - end - end - end - end - - describe Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds do - it 'has custom error message' do - expect(described_class.new.message).to eq('metric_id must be unique across a project') - end - end -end diff --git a/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb deleted file mode 100644 index e7cb1429ca9..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb +++ /dev/null @@ -1,78 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Validator::PostSchemaValidator do - describe '#validate' do - context 'with no project and dashboard_path provided' do - context 'unique 
local metric_ids' do - it 'returns empty array' do - expect(described_class.new(metric_ids: [1, 2, 3]).validate).to eq([]) - end - end - - context 'duplicate local metrics_ids' do - it 'returns error' do - expect(described_class.new(metric_ids: [1, 1]).validate) - .to eq([Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds]) - end - end - end - - context 'with project and dashboard_path' do - let(:project) { create(:project) } - - subject do - described_class.new( - project: project, - metric_ids: ['some_identifier'], - dashboard_path: 'test/path.yml' - ).validate - end - - context 'with unique metric identifiers' do - before do - create(:prometheus_metric, - project: project, - identifier: 'some_other_identifier', - dashboard_path: 'test/path.yml' - ) - end - - it 'returns empty array' do - expect(subject).to eq([]) - end - end - - context 'duplicate metric identifiers in database' do - context 'with different dashboard_path' do - before do - create(:prometheus_metric, - project: project, - identifier: 'some_identifier', - dashboard_path: 'some/other/path.yml' - ) - end - - it 'returns error' do - expect(subject).to include(Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds) - end - end - - context 'with same dashboard_path' do - before do - create(:prometheus_metric, - project: project, - identifier: 'some_identifier', - dashboard_path: 'test/path.yml' - ) - end - - it 'returns empty array' do - expect(subject).to eq([]) - end - end - end - end - end -end diff --git a/spec/lib/gitlab/metrics/dashboard/validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator_spec.rb deleted file mode 100644 index fb55b736354..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/validator_spec.rb +++ /dev/null @@ -1,146 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Validator do - include MetricsDashboardHelpers - - let_it_be(:valid_dashboard) { load_sample_dashboard } - 
let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) } - let_it_be(:duplicate_id_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml')) } - - let_it_be(:project) { create(:project) } - - describe '#validate' do - context 'valid dashboard schema' do - it 'returns true' do - expect(described_class.validate(valid_dashboard)).to be true - end - - context 'with duplicate metric_ids' do - it 'returns false' do - expect(described_class.validate(duplicate_id_dashboard)).to be false - end - end - - context 'with dashboard_path and project' do - subject { described_class.validate(valid_dashboard, dashboard_path: 'test/path.yml', project: project) } - - context 'with no conflicting metric identifiers in db' do - it { is_expected.to be true } - end - - context 'with metric identifier present in current dashboard' do - before do - create(:prometheus_metric, - identifier: 'metric_a1', - dashboard_path: 'test/path.yml', - project: project - ) - end - - it { is_expected.to be true } - end - - context 'with metric identifier present in another dashboard' do - before do - create(:prometheus_metric, - identifier: 'metric_a1', - dashboard_path: 'some/other/dashboard/path.yml', - project: project - ) - end - - it { is_expected.to be false } - end - end - end - - context 'invalid dashboard schema' do - it 'returns false' do - expect(described_class.validate(invalid_dashboard)).to be false - end - end - end - - describe '#validate!' 
do - shared_examples 'validation failed' do |errors_message| - it 'raises error with corresponding messages', :aggregate_failures do - expect { subject }.to raise_error do |error| - expect(error).to be_kind_of(Gitlab::Metrics::Dashboard::Validator::Errors::InvalidDashboardError) - expect(error.message).to eq(errors_message) - end - end - end - - context 'valid dashboard schema' do - it 'returns true' do - expect(described_class.validate!(valid_dashboard)).to be true - end - - context 'with duplicate metric_ids' do - subject { described_class.validate!(duplicate_id_dashboard) } - - it_behaves_like 'validation failed', 'metric_id must be unique across a project' - end - - context 'with dashboard_path and project' do - subject { described_class.validate!(valid_dashboard, dashboard_path: 'test/path.yml', project: project) } - - context 'with no conflicting metric identifiers in db' do - it { is_expected.to be true } - end - - context 'with metric identifier present in current dashboard' do - before do - create(:prometheus_metric, - identifier: 'metric_a1', - dashboard_path: 'test/path.yml', - project: project - ) - end - - it { is_expected.to be true } - end - - context 'with metric identifier present in another dashboard' do - before do - create(:prometheus_metric, - identifier: 'metric_a1', - dashboard_path: 'some/other/dashboard/path.yml', - project: project - ) - end - - it_behaves_like 'validation failed', 'metric_id must be unique across a project' - end - end - end - - context 'invalid dashboard schema' do - subject { described_class.validate!(invalid_dashboard) } - - context 'wrong property type' do - it_behaves_like 'validation failed', "'this_should_be_a_int' at /panel_groups/0/panels/0/weight is not of type: number" - end - - context 'panel groups missing' do - let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml')) } - - it_behaves_like 'validation failed', 'root is missing 
required keys: panel_groups' - end - - context 'groups are missing panels and group keys' do - let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_groups_missing_panels_and_group.yml')) } - - it_behaves_like 'validation failed', '/panel_groups/0 is missing required keys: group' - end - - context 'panel is missing metrics key' do - let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_panel_is_missing_metrics.yml')) } - - it_behaves_like 'validation failed', '/panel_groups/0/panels/0 is missing required keys: metrics' - end - end - end -end diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb index e674d37ece6..0cbf9eab3d8 100644 --- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb @@ -59,6 +59,19 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do described_class.initialize_process_metrics end + context 'when emit_sidekiq_histogram FF is disabled' do + before do + stub_feature_flags(emit_sidekiq_histogram_metrics: false) + allow(Gitlab::SidekiqConfig).to receive(:current_worker_queue_mappings).and_return('MergeWorker' => 'merge') + end + + it 'does not initialize sidekiq_jobs_completion_seconds' do + expect(completion_seconds_metric).not_to receive(:get) + + described_class.initialize_process_metrics + end + end + shared_examples "not initializing sidekiq SLIs" do it 'does not initialize sidekiq SLIs' do expect(Gitlab::Metrics::SidekiqSlis) @@ -441,5 +454,53 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do end end end + + context 'when emit_sidekiq_histogram_metrics FF is disabled' do + include_context 'server metrics with mocked prometheus' + include_context 'server metrics call' do + let(:stub_subject) { false } + end + + subject(:middleware) { described_class.new } + + let(:job) { {} } + 
let(:queue) { :test } + let(:worker_class) do + Class.new do + def self.name + "TestWorker" + end + include ApplicationWorker + end + end + + let(:worker) { worker_class.new } + let(:labels) do + { queue: queue.to_s, + worker: worker.class.name, + boundary: "", + external_dependencies: "no", + feature_category: "", + urgency: "low" } + end + + before do + stub_feature_flags(emit_sidekiq_histogram_metrics: false) + end + + it 'does not emit histogram metrics' do + expect(completion_seconds_metric).not_to receive(:observe) + expect(queue_duration_seconds).not_to receive(:observe) + expect(failed_total_metric).not_to receive(:increment) + + middleware.call(worker, job, queue) { nil } + end + + it 'emits sidekiq_jobs_completion_seconds_sum metric' do + expect(completion_seconds_sum_metric).to receive(:increment).with(labels, monotonic_time_duration) + + middleware.call(worker, job, queue) { nil } + end + end end # rubocop: enable RSpec/MultipleMemoizedHelpers diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb index 19236cdbba0..ab92b59c845 100644 --- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb @@ -3,41 +3,31 @@ require 'spec_helper' RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_redis_shared_state do - let(:user1) { build(:user, id: 1) } + let(:user) { build(:user, id: 1) } let(:user2) { build(:user, id: 2) } let(:user3) { build(:user, id: 3) } let(:project) { build(:project) } + let(:namespace) { project.namespace } let(:time) { Time.zone.now } shared_examples 'tracks and counts action' do + subject { track_action(author: user, project: project) } + before do stub_application_setting(usage_ping_enabled: true) end specify do aggregate_failures do - expect(track_action(author: user1, project: project)).to be_truthy + expect(track_action(author: user, 
project: project)).to be_truthy expect(track_action(author: user2, project: project)).to be_truthy - expect(track_action(author: user3, time: time.end_of_week - 3.days, project: project)).to be_truthy + expect(track_action(author: user3, project: project)).to be_truthy expect(count_unique(date_from: time.beginning_of_week, date_to: 1.week.from_now)).to eq(3) end end - it 'track snowplow event' do - track_action(author: user1, project: project) - - expect_snowplow_event( - category: described_class.name, - action: 'ide_edit', - label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit', - namespace: project.namespace, - property: event_name, - project: project, - user: user1, - context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_h] - ) - end + it_behaves_like 'internal event tracking' it 'does not track edit actions if author is not present' do expect(track_action(author: nil, project: project)).to be_nil @@ -45,7 +35,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red end context 'for web IDE edit actions' do - let(:event_name) { described_class::EDIT_BY_WEB_IDE } + let(:action) { described_class::EDIT_BY_WEB_IDE } it_behaves_like 'tracks and counts action' do def track_action(params) @@ -59,7 +49,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red end context 'for SFE edit actions' do - let(:event_name) { described_class::EDIT_BY_SFE } + let(:action) { described_class::EDIT_BY_SFE } it_behaves_like 'tracks and counts action' do def track_action(params) @@ -73,7 +63,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red end context 'for snippet editor edit actions' do - let(:event_name) { described_class::EDIT_BY_SNIPPET_EDITOR } + let(:action) { described_class::EDIT_BY_SNIPPET_EDITOR } it_behaves_like 'tracks and counts action' do def track_action(params) diff --git 
a/spec/models/ci/job_annotation_spec.rb b/spec/models/ci/job_annotation_spec.rb index f94494bc91d..465c168b714 100644 --- a/spec/models/ci/job_annotation_spec.rb +++ b/spec/models/ci/job_annotation_spec.rb @@ -11,7 +11,6 @@ RSpec.describe Ci::JobAnnotation, feature_category: :build_artifacts do it { is_expected.to belong_to(:job).class_name('Ci::Build').inverse_of(:job_annotations) } it { is_expected.to validate_presence_of(:name) } it { is_expected.to validate_length_of(:name).is_at_most(255) } - it { is_expected.to validate_uniqueness_of(:name).scoped_to([:job_id, :partition_id]) } end describe '.create' do diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb index c9b2e3e6b23..c6af7609778 100644 --- a/spec/models/ci/processable_spec.rb +++ b/spec/models/ci/processable_spec.rb @@ -78,7 +78,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do job_artifacts_requirements job_artifacts_coverage_fuzzing job_artifacts_requirements_v2 job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx - job_annotations].freeze + job_annotations job_artifacts_annotations].freeze end let(:ignore_accessors) do diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb index 2ab5fb715b8..d10f375788a 100644 --- a/spec/models/plan_limits_spec.rb +++ b/spec/models/plan_limits_spec.rb @@ -248,6 +248,7 @@ RSpec.describe PlanLimits do ci_max_artifact_size_requirements_v2 ci_max_artifact_size_coverage_fuzzing ci_max_artifact_size_api_fuzzing + ci_max_artifact_size_annotations ] end diff --git a/spec/models/pool_repository_spec.rb b/spec/models/pool_repository_spec.rb index 9861e832bef..93c1e59458d 100644 --- a/spec/models/pool_repository_spec.rb +++ b/spec/models/pool_repository_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe PoolRepository do +RSpec.describe PoolRepository, feature_category: :source_code_management do describe 'associations' do it { is_expected.to 
belong_to(:shard) } it { is_expected.to belong_to(:source_project) } @@ -16,12 +16,43 @@ RSpec.describe PoolRepository do it { is_expected.to validate_presence_of(:source_project) } end + describe 'scopes' do + let_it_be(:project1) { create(:project) } + let_it_be(:project2) { create(:project) } + let_it_be(:new_shard) { create(:shard, name: 'new') } + let_it_be(:pool_repository1) { create(:pool_repository, source_project: project1) } + let_it_be(:pool_repository2) { create(:pool_repository, source_project: project1, shard: new_shard) } + let_it_be(:another_pool_repository) { create(:pool_repository, source_project: project2) } + + describe '.by_source_project' do + subject { described_class.by_source_project(project1) } + + it 'returns pool repositories per source project from all shards' do + is_expected.to match_array([pool_repository1, pool_repository2]) + end + end + + describe '.by_source_project_and_shard_name' do + subject { described_class.by_source_project_and_shard_name(project1, new_shard.name) } + + it 'returns only a requested pool repository' do + is_expected.to match_array([pool_repository2]) + end + end + end + describe '#disk_path' do it 'sets the hashed disk_path' do pool = create(:pool_repository) expect(pool.disk_path).to match(%r{\A@pools/\h{2}/\h{2}/\h{64}}) end + + it 'keeps disk_path if already provided' do + pool = create(:pool_repository, disk_path: '@pools/aa/bbbb') + + expect(pool.disk_path).to eq('@pools/aa/bbbb') + end end describe '#unlink_repository' do diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index e138d7a4c1b..a9b704608ef 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -3045,6 +3045,34 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr shard_name: 'foo' ) end + + it 'refreshes a memoized repository value' do + previous_repository = project.repository + + allow(project).to receive(:disk_path).and_return('fancy/new/path') + allow(project).to 
receive(:repository_storage).and_return('foo') + + project.track_project_repository + + expect(project.repository).not_to eq(previous_repository) + end + + context 'when "replicate_object_pool_on_move" FF is disabled' do + before do + stub_feature_flags(replicate_object_pool_on_move: false) + end + + it 'does not update a memoized repository value' do + previous_repository = project.repository + + allow(project).to receive(:disk_path).and_return('fancy/new/path') + allow(project).to receive(:repository_storage).and_return('foo') + + project.track_project_repository + + expect(project.repository).to eq(previous_repository) + end + end end end @@ -6951,6 +6979,73 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr end end + describe '#swap_pool_repository!' do + subject(:swap_pool_repository!) { project.swap_pool_repository! } + + let_it_be_with_reload(:project) { create(:project, :empty_repo) } + let_it_be(:shard_to) { create(:shard, name: 'test_second_storage') } + + let!(:pool1) { create(:pool_repository, source_project: project) } + let!(:pool2) { create(:pool_repository, shard: shard_to, source_project: project) } + let(:project_pool) { pool1 } + let(:repository_storage) { shard_to.name } + + before do + stub_storage_settings( + 'test_second_storage' => { + 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address, + 'path' => TestEnv::SECOND_STORAGE_PATH + } + ) + + project.update!(pool_repository: project_pool, repository_storage: repository_storage) + end + + shared_examples 'no pool repository swap' do + it 'does not change pool repository for the project' do + expect { swap_pool_repository! }.not_to change { project.reload.pool_repository } + end + end + + it 'moves project to the new pool repository' do + expect { swap_pool_repository! 
}.to change { project.reload.pool_repository }.from(pool1).to(pool2) + end + + context 'when feature flag replicate_object_pool_on_move is disabled' do + before do + stub_feature_flags(replicate_object_pool_on_move: false) + end + + it_behaves_like 'no pool repository swap' + end + + context 'when repository does not exist' do + let(:project) { build(:project) } + + it_behaves_like 'no pool repository swap' + end + + context 'when project does not have a pool repository' do + let(:project_pool) { nil } + + it_behaves_like 'no pool repository swap' + end + + context 'when project pool is on the same shard as repository' do + let(:project_pool) { pool2 } + + it_behaves_like 'no pool repository swap' + end + + context 'when pool repository for shard is missing' do + let(:pool2) { nil } + + it 'raises record not found error' do + expect { swap_pool_repository! }.to raise_error(ActiveRecord::RecordNotFound) + end + end + end + describe '#leave_pool_repository' do let(:pool) { create(:pool_repository) } let(:project) { create(:project, :repository, pool_repository: pool) } @@ -6978,6 +7073,53 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr end end + describe '#link_pool_repository' do + let(:pool) { create(:pool_repository) } + let(:project) { build(:project, :empty_repo, pool_repository: pool) } + + subject { project.link_pool_repository } + + it 'links pool repository to project repository' do + expect(pool).to receive(:link_repository).with(project.repository) + + subject + end + + context 'when pool repository is missing' do + let(:pool) { nil } + + it 'does not link anything' do + allow_next_instance_of(PoolRepository) do |pool_repository| + expect(pool_repository).not_to receive(:link_repository) + end + + subject + end + end + + context 'when pool repository is on the different shard as project repository' do + let(:pool) { create(:pool_repository, shard: create(:shard, name: 'new')) } + + it 'does not link anything' do + 
expect(pool).not_to receive(:link_repository) + + subject + end + + context 'when feature flag replicate_object_pool_on_move is disabled' do + before do + stub_feature_flags(replicate_object_pool_on_move: false) + end + + it 'links pool repository to project repository' do + expect(pool).to receive(:link_repository).with(project.repository) + + subject + end + end + end + end + describe '#check_personal_projects_limit' do context 'when creating a project for a group' do it 'does nothing' do diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index 28126f1bdc2..687ce333ca5 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -573,13 +573,9 @@ RSpec.describe API::Commits, feature_category: :source_code_management do subject end - it_behaves_like 'Snowplow event tracking with RedisHLL context' do + it_behaves_like 'internal event tracking' do + let(:action) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_WEB_IDE } let(:namespace) { project.namespace.reload } - let(:category) { 'Gitlab::UsageDataCounters::EditorUniqueCounter' } - let(:action) { 'ide_edit' } - let(:property) { 'g_edit_by_web_ide' } - let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit' } - let(:context) { [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context] } end context 'counts.web_ide_commits Snowplow event tracking' do diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb index 7c5ab691b51..06594d89338 100644 --- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb +++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb @@ -188,16 +188,10 @@ RSpec.describe 'Updating a Snippet', feature_category: :source_code_management d stub_session('warden.user.user.key' => [[current_user.id], current_user.authenticatable_salt]) end - it_behaves_like 
'Snowplow event tracking with RedisHLL context' do + it_behaves_like 'internal event tracking' do + let(:action) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_SNIPPET_EDITOR } let(:user) { current_user } - let(:property) { 'g_edit_by_snippet_ide' } let(:namespace) { project.namespace } - let(:category) { 'Gitlab::UsageDataCounters::EditorUniqueCounter' } - let(:action) { 'ide_edit' } - let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit' } - let(:context) do - [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context] - end end end end diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb index 7e471bf39a1..a23ba250daf 100644 --- a/spec/services/ci/job_artifacts/create_service_spec.rb +++ b/spec/services/ci/job_artifacts/create_service_spec.rb @@ -321,6 +321,45 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state end end + shared_examples_for 'handling annotations' do |storage_type| + context 'when artifact type is annotations' do + let(:params) do + { + 'artifact_type' => 'annotations', + 'artifact_format' => 'gzip' + }.with_indifferent_access + end + + if storage_type == :object_storage + let(:object_body) { File.read('spec/fixtures/gl-annotations.json.gz') } + let(:upload_filename) { 'gl-annotations.json.gz' } + + before do + stub_request(:get, %r{s3.amazonaws.com/#{remote_path}}) + .to_return(status: 200, body: File.read('spec/fixtures/gl-annotations.json.gz')) + end + else + let(:artifacts_file) do + file_to_upload('spec/fixtures/gl-annotations.json.gz', sha256: artifacts_sha256) + end + end + + it 'calls parse service' do + expect_any_instance_of(Ci::ParseAnnotationsArtifactService) do |service| + expect(service).to receive(:execute).once.and_call_original + end + + expect(execute[:status]).to eq(:success) + expect(job.job_annotations.as_json).to contain_exactly( + 
hash_including('name' => 'external_links', 'data' => [ + hash_including('external_link' => hash_including('label' => 'URL 1', 'url' => 'https://url1.example.com/')), + hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/')) + ]) + ) + end + end + end + shared_examples_for 'handling object storage errors' do shared_examples 'rescues object storage error' do |klass, message, expected_message| it "handles #{klass}" do @@ -495,6 +534,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state it_behaves_like 'handling uploads' it_behaves_like 'handling dotenv', :object_storage + it_behaves_like 'handling annotations', :object_storage it_behaves_like 'handling object storage errors' it_behaves_like 'validating requirements' end @@ -506,6 +546,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state it_behaves_like 'handling uploads' it_behaves_like 'handling dotenv', :local_storage + it_behaves_like 'handling annotations', :local_storage it_behaves_like 'validating requirements' end end diff --git a/spec/services/ci/parse_annotations_artifact_service_spec.rb b/spec/services/ci/parse_annotations_artifact_service_spec.rb new file mode 100644 index 00000000000..4847447230b --- /dev/null +++ b/spec/services/ci/parse_annotations_artifact_service_spec.rb @@ -0,0 +1,182 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::ParseAnnotationsArtifactService, feature_category: :build_artifacts do + let_it_be(:project) { create(:project) } + + let_it_be_with_reload(:build) { create(:ci_build, project: project) } + let(:service) { described_class.new(project, nil) } + + describe '#execute' do + subject { service.execute(artifact) } + + context 'when build has an annotations artifact' do + let_it_be(:artifact) { create(:ci_job_artifact, :annotations, job: build) } + + context 'when artifact does not have the specified blob' do + before do + 
allow(artifact).to receive(:each_blob) + end + + it 'parses nothing' do + expect(subject[:status]).to eq(:success) + + expect(build.job_annotations).to be_empty + end + end + + context 'when artifact has the specified blob' do + let(:blob) { data.to_json } + + before do + allow(artifact).to receive(:each_blob).and_yield(blob) + end + + context 'when valid annotations are given' do + let(:data) do + { + external_links: [ + { + external_link: { + label: 'URL 1', + url: 'https://url1.example.com/' + } + }, + { + external_link: { + label: 'URL 2', + url: 'https://url2.example.com/' + } + } + ] + } + end + + it 'parses the artifact' do + subject + + expect(build.job_annotations.as_json).to contain_exactly( + hash_including('name' => 'external_links', 'data' => [ + hash_including('external_link' => hash_including('label' => 'URL 1', 'url' => 'https://url1.example.com/')), + hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/')) + ]) + ) + end + end + + context 'when valid annotations are given and annotation list name is the same' do + before do + build.job_annotations.create!(name: 'external_links', data: [ + { + external_link: { + label: 'URL 1', + url: 'https://url1.example.com/' + } + } + ]) + end + + let(:data) do + { + external_links: [ + { + external_link: { + label: 'URL 2', + url: 'https://url2.example.com/' + } + } + ] + } + end + + it 'parses the artifact' do + subject + + expect(build.job_annotations.as_json).to contain_exactly( + hash_including('name' => 'external_links', 'data' => [ + hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/')) + ]) + ) + end + end + + context 'when invalid JSON is given' do + let(:blob) { 'Invalid JSON!' 
} + + it 'returns error' do + expect(subject[:status]).to eq(:error) + expect(subject[:http_status]).to eq(:bad_request) + end + end + + context 'when root is not an object' do + let(:data) { [] } + + it 'returns error' do + expect(subject[:status]).to eq(:error) + expect(subject[:message]).to eq('Annotations files must be a JSON object') + expect(subject[:http_status]).to eq(:bad_request) + end + end + + context 'when item is not a valid annotation list' do + let(:data) { { external_links: {} } } + + it 'returns error' do + expect(subject[:status]).to eq(:error) + expect(subject[:message]).to eq('Validation failed: Data must be a valid json schema') + expect(subject[:http_status]).to eq(:bad_request) + end + end + + context 'when more than limitated annotations are specified in annotations' do + let(:data) do + { + external_links_1: [ + { + external_link: { + label: 'URL', + url: 'https://example.com/' + } + } + ], + external_links_2: [ + { + external_link: { + label: 'URL', + url: 'https://example.com/' + } + } + ] + } + end + + before do + allow(service).to receive(:annotations_num_limit).and_return(1) + end + + it 'returns error' do + expect(subject[:status]).to eq(:error) + expect(subject[:message]).to eq( + "Annotations files cannot have more than #{service.send(:annotations_num_limit)} annotation lists") + expect(subject[:http_status]).to eq(:bad_request) + end + end + end + + context 'when artifact size is too big' do + before do + allow(artifact.file).to receive(:size) { service.send(:annotations_size_limit) + 1.kilobyte } + end + + it 'returns error' do + expect(subject[:status]).to eq(:error) + expect(subject[:message]).to eq( + "Annotations Artifact Too Big. 
Maximum Allowable Size: #{service.send(:annotations_size_limit)}") + expect(subject[:http_status]).to eq(:bad_request) + end + end + end + end +end diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb index 4ba72b5870d..4d55f310974 100644 --- a/spec/services/projects/fork_service_spec.rb +++ b/spec/services/projects/fork_service_spec.rb @@ -380,7 +380,7 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management end context 'when a project is already forked' do - it 'creates a new poolresository after the project is moved to a new shard' do + it 'creates a new pool repository after the project is moved to a new shard' do project = create(:project, :public, :repository) fork_before_move = fork_project(project, nil, using_service: true) @@ -393,6 +393,9 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate) allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum) .and_return(::Gitlab::Git::BLANK_SHA) + allow_next_instance_of(Gitlab::Git::ObjectPool) do |object_pool| + allow(object_pool).to receive(:link) + end storage_move = create( :project_repository_storage_move, diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb index af920d51776..d3972009d38 100644 --- a/spec/services/projects/update_repository_storage_service_spec.rb +++ b/spec/services/projects/update_repository_storage_service_spec.rb @@ -12,10 +12,19 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour before do allow(Time).to receive(:now).and_return(time) - allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%w[default test_second_storage]) + + stub_storage_settings( + 'test_second_storage' => { + 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address, + 'path' 
=> TestEnv::SECOND_STORAGE_PATH + } + ) end context 'without wiki and design repository' do + let!(:shard_default) { create(:shard, name: 'default') } + let!(:shard_second_storage) { create(:shard, name: 'test_second_storage') } + let(:project) { create(:project, :repository, wiki_enabled: false) } let(:destination) { 'test_second_storage' } let(:repository_storage_move) { create(:project_repository_storage_move, :scheduled, container: project, destination_storage_name: destination) } @@ -23,6 +32,12 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour let(:project_repository_double) { double(:repository) } let(:original_project_repository_double) { double(:repository) } + let(:object_pool_double) { double(:object_pool, repository: object_pool_repository_double) } + let(:object_pool_repository_double) { double(:repository) } + + let(:original_object_pool_double) { double(:object_pool, repository: original_object_pool_repository_double) } + let(:original_object_pool_repository_double) { double(:repository) } + before do allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid) @@ -33,6 +48,17 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour allow(Gitlab::Git::Repository).to receive(:new) .with('default', project.repository.raw.relative_path, nil, nil) .and_return(original_project_repository_double) + + allow(Gitlab::Git::ObjectPool).to receive(:new).and_call_original + allow(Gitlab::Git::ObjectPool).to receive(:new) + .with('test_second_storage', anything, anything, anything) + .and_return(object_pool_double) + allow(Gitlab::Git::ObjectPool).to receive(:new) + .with('default', anything, anything, anything) + .and_return(original_object_pool_double) + + allow(original_object_pool_double).to receive(:create) + allow(object_pool_double).to receive(:create) end 
context 'when the move succeeds' do @@ -124,25 +150,138 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour end end - context 'when a object pool was joined' do - let!(:pool) { create(:pool_repository, :ready, source_project: project) } + context 'with repository pool' do + let(:shard_from) { shard_default } + let(:shard_to) { shard_second_storage } + let(:old_object_pool_checksum) { 'abcd' } + let(:new_object_pool_checksum) { old_object_pool_checksum } - it 'leaves the pool' do - allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original - allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid) + before do + allow(project_repository_double).to receive(:replicate).with(project.repository.raw) + allow(project_repository_double).to receive(:checksum).and_return(checksum) + allow(original_project_repository_double).to receive(:remove) - expect(project_repository_double).to receive(:replicate) - .with(project.repository.raw) - expect(project_repository_double).to receive(:checksum) - .and_return(checksum) - expect(original_project_repository_double).to receive(:remove) + allow(object_pool_repository_double).to receive(:replicate).with(original_object_pool_repository_double) + allow(object_pool_repository_double).to receive(:checksum).and_return(new_object_pool_checksum) + allow(original_object_pool_repository_double).to receive(:checksum).and_return(old_object_pool_checksum) - result = subject.execute - project.reload + allow(object_pool_double).to receive(:link) do |repository| + expect(repository.storage).to eq 'test_second_storage' + end + end - expect(result).to be_success - expect(project.repository_storage).to eq('test_second_storage') - expect(project.reload_pool_repository).to be_nil + context 'when project had a repository pool' do + let!(:pool_repository) { create(:pool_repository, :ready, shard: shard_from, source_project: project) } + + it 
'creates a new repository pool and connects project to it' do + result = subject.execute + expect(result).to be_success + + project.reload.cleanup + + new_pool_repository = project.pool_repository + + expect(new_pool_repository).not_to eq(pool_repository) + expect(new_pool_repository.shard).to eq(shard_second_storage) + expect(new_pool_repository.state).to eq('ready') + expect(new_pool_repository.disk_path).to eq(pool_repository.disk_path) + expect(new_pool_repository.source_project).to eq(project) + + expect(object_pool_double).to have_received(:link).with(project.repository.raw) + end + + context 'when feature flag replicate_object_pool_on_move is disabled' do + before do + stub_feature_flags(replicate_object_pool_on_move: false) + end + + it 'just moves the repository without the object pool' do + result = subject.execute + expect(result).to be_success + + project.reload.cleanup + + new_pool_repository = project.pool_repository + + expect(new_pool_repository).to eq(pool_repository) + expect(new_pool_repository.shard).to eq(shard_default) + expect(new_pool_repository.state).to eq('ready') + expect(new_pool_repository.source_project).to eq(project) + + expect(object_pool_repository_double).not_to have_received(:replicate) + expect(object_pool_double).not_to have_received(:link) + end + end + + context 'when new shard has a repository pool' do + let!(:new_pool_repository) { create(:pool_repository, :ready, shard: shard_to, source_project: project) } + + it 'connects project to it' do + result = subject.execute + expect(result).to be_success + + project.reload.cleanup + + project_pool_repository = project.pool_repository + + expect(project_pool_repository).to eq(new_pool_repository) + expect(object_pool_double).to have_received(:link).with(project.repository.raw) + end + end + + context 'when repository does not exist' do + let(:project) { create(:project) } + let(:checksum) { nil } + + it 'does not mirror object pool' do + result = subject.execute + 
expect(result).to be_success + + expect(object_pool_repository_double).not_to have_received(:replicate) + end + end + + context 'when project belongs to repository pool, but not as a root project' do + let!(:another_project) { create(:project, :repository) } + let!(:pool_repository) { create(:pool_repository, :ready, shard: shard_from, source_project: another_project) } + + before do + project.update!(pool_repository: pool_repository) + end + + it 'creates a new repository pool and connects project to it' do + result = subject.execute + expect(result).to be_success + + project.reload.cleanup + + new_pool_repository = project.pool_repository + + expect(new_pool_repository).not_to eq(pool_repository) + expect(new_pool_repository.shard).to eq(shard_second_storage) + expect(new_pool_repository.state).to eq('ready') + expect(new_pool_repository.source_project).to eq(another_project) + + expect(object_pool_double).to have_received(:link).with(project.repository.raw) + end + end + + context 'when object pool checksum does not match' do + let(:new_object_pool_checksum) { 'not_match' } + + it 'raises an error and does not change state' do + original_count = PoolRepository.count + + expect { subject.execute }.to raise_error(UpdateRepositoryStorageMethods::Error) + + project.reload + + expect(PoolRepository.count).to eq(original_count) + + expect(project.pool_repository).to eq(pool_repository) + expect(project.repository.shard).to eq('default') + end + end end end diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb index b6c54e902a2..d9b2b44980c 100644 --- a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb +++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb @@ -18,6 +18,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do 
let(:elasticsearch_requests_total) { double('elasticsearch calls total metric') } let(:load_balancing_metric) { double('load balancing metric') } let(:sidekiq_mem_total_bytes) { double('sidekiq mem total bytes') } + let(:completion_seconds_sum_metric) { double('sidekiq completion seconds sum metric') } before do allow(Gitlab::Metrics).to receive(:histogram).and_call_original @@ -36,6 +37,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_redis_requests_total, anything).and_return(redis_requests_total) allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_elasticsearch_requests_total, anything).and_return(elasticsearch_requests_total) allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric) + allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_completion_seconds_sum, anything).and_return(completion_seconds_sum_metric) allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric) allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_concurrency, anything, {}, :all).and_return(concurrency_metric) allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_mem_total_bytes, anything, {}, :all).and_return(sidekiq_mem_total_bytes) @@ -76,8 +78,13 @@ RSpec.shared_context 'server metrics call' do } end + let(:stub_subject) { true } + before do - allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after) + if stub_subject + allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after) + end + allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after) allow(Gitlab::InstrumentationHelper).to receive(:queue_duration_for_job).with(job).and_return(queue_duration_for_job) allow(ActiveRecord::LogSubscriber).to 
receive(:runtime).and_return(db_duration * 1000) @@ -93,6 +100,7 @@ RSpec.shared_context 'server metrics call' do allow(running_jobs_metric).to receive(:increment) allow(redis_requests_total).to receive(:increment) allow(elasticsearch_requests_total).to receive(:increment) + allow(completion_seconds_sum_metric).to receive(:increment) allow(queue_duration_seconds).to receive(:observe) allow(user_execution_seconds_metric).to receive(:observe) allow(db_seconds_metric).to receive(:observe) diff --git a/spec/support/shared_contexts/user_contribution_events_shared_context.rb b/spec/support/shared_contexts/user_contribution_events_shared_context.rb index 54407f74bde..caa024d78c9 100644 --- a/spec/support/shared_contexts/user_contribution_events_shared_context.rb +++ b/spec/support/shared_contexts/user_contribution_events_shared_context.rb @@ -143,6 +143,7 @@ RSpec.shared_context 'with user contribution events' do # reopened let_it_be(:reopened_issue_event) { create(:event, :reopened, author: user, project: project, target: issue) } let_it_be(:reopened_milestone_event) { create(:event, :reopened, author: user, project: project, target: milestone) } + let_it_be(:reopened_task_event) { create(:event, :reopened, author: user, project: project, target: task) } let_it_be(:reopened_incident_event) { create(:event, :reopened, author: user, project: project, target: incident) } let_it_be(:reopened_merge_request_event) do create(:event, :reopened, author: user, project: project, target: merge_request) diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb index 30041456d00..19001abcbe2 100644 --- a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb +++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb @@ -22,6 +22,14 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context it 'returns no items' do expect(items).to 
be_empty end + + context 'when there are group-level work items' do + let!(:group_work_item) { create(:work_item, namespace: create(:group)) } + + it 'returns no items' do + expect(items).to be_empty + end + end end context 'when filtering by group id' do diff --git a/yarn.lock b/yarn.lock index 8cd3137a593..145eb3e665e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5774,10 +5774,10 @@ eslint-import-resolver-node@^0.3.7: is-core-module "^2.11.0" resolve "^1.22.1" -eslint-import-resolver-webpack@0.13.2: - version "0.13.2" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-webpack/-/eslint-import-resolver-webpack-0.13.2.tgz#fc813df0d08b9265cc7072d22393bda5198bdc1e" - integrity sha512-XodIPyg1OgE2h5BDErz3WJoK7lawxKTJNhgPNafRST6csC/MZC+L5P6kKqsZGRInpbgc02s/WZMrb4uGJzcuRg== +eslint-import-resolver-webpack@0.13.4: + version "0.13.4" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-webpack/-/eslint-import-resolver-webpack-0.13.4.tgz#77b3ea5dabc483053c75ccf29d0fe8f706a71897" + integrity sha512-6RN3DFoOu8J05VAjuclAquTiLou/JYZx4x7qoL2rC96LmNqYyIwszSqb+Ys1Q+eA6qvQhXYKDaHnEpHmDA0qBw== dependencies: array-find "^1.0.0" debug "^3.2.7" @@ -5785,11 +5785,11 @@ eslint-import-resolver-webpack@0.13.2: find-root "^1.1.0" has "^1.0.3" interpret "^1.4.0" - is-core-module "^2.7.0" + is-core-module "^2.13.0" is-regex "^1.1.4" lodash "^4.17.21" - resolve "^1.20.0" - semver "^5.7.1" + resolve "^1.22.4" + semver "^5.7.2" eslint-module-utils@^2.8.0: version "2.8.0" @@ -7424,10 +7424,10 @@ is-ci@^2.0.0: dependencies: ci-info "^2.0.0" -is-core-module@^2.11.0, is-core-module@^2.12.0, is-core-module@^2.12.1, is-core-module@^2.5.0, is-core-module@^2.7.0: - version "2.12.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.12.1.tgz#0c0b6885b6f80011c71541ce15c8d66cf5a4f9fd" - integrity sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg== +is-core-module@^2.11.0, is-core-module@^2.12.1, is-core-module@^2.13.0, 
is-core-module@^2.5.0: + version "2.13.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.0.tgz#bb52aa6e2cbd49a30c2ba68c42bf3435ba6072db" + integrity sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ== dependencies: has "^1.0.3" @@ -11146,12 +11146,12 @@ resolve.exports@^1.1.0: resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== -resolve@^1.10.0, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.22.3, resolve@^1.9.0: - version "1.22.3" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.3.tgz#4b4055349ffb962600972da1fdc33c46a4eb3283" - integrity sha512-P8ur/gp/AmbEzjr729bZnLjXK5Z+4P0zhIJgBgzqRih7hL7BOukHGtSTA3ACMY467GRFz3duQsi0bDZdR7DKdw== +resolve@^1.10.0, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.22.3, resolve@^1.22.4, resolve@^1.9.0: + version "1.22.4" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.4.tgz#1dc40df46554cdaf8948a486a10f6ba1e2026c34" + integrity sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg== dependencies: - is-core-module "^2.12.0" + is-core-module "^2.13.0" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" @@ -11374,10 +11374,10 @@ selfsigned@^2.1.1: dependencies: node-forge "^1" -"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0, semver@^5.7.1: - version "5.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" - integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== +"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0, semver@^5.7.1, semver@^5.7.2: + version "5.7.2" + resolved 
"https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== semver@7.0.0, semver@~7.0.0: version "7.0.0"