Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-10-29 12:14:45 +00:00
parent a056c4d05f
commit 27f6da0ab2
68 changed files with 1727 additions and 494 deletions

View File

@@ -182,7 +182,7 @@ setup-test-env:
extends:
- .rails-job-base
- .setup-test-env-cache
- .rails:rules:code-backstage-qa
- .rails:rules:setup-test-env
stage: prepare
variables:
SETUP_DB: "false"

View File

@@ -211,6 +211,7 @@
.startup-css-patterns: &startup-css-patterns
- "{,ee/,jh/}app/assets/stylesheets/startup/**/*"
# Backend patterns + .ci-patterns
.backend-patterns: &backend-patterns
- "{,jh/}Gemfile{,.lock}"
- "Rakefile"
@@ -224,6 +225,7 @@
- ".gitlab/ci/**/*"
- "*_VERSION"
# DB patterns + .ci-patterns
.db-patterns: &db-patterns
- "{,ee/,jh/}{,spec/}{db,migrations}/**/*"
- "{,ee/,jh/}{,spec/}lib/{,ee/,jh/}gitlab/database/**/*"
@@ -252,6 +254,11 @@
- "{,ee/,jh/}spec/**/*"
- "{,spec/}tooling/**/*"
.qa-patterns: &qa-patterns
- ".dockerignore"
- "qa/**/*"
# Code patterns + .ci-patterns + .workhorse-patterns
.code-patterns: &code-patterns
- "{package.json,yarn.lock}"
- ".browserslistrc"
@@ -260,9 +267,6 @@
- ".csscomb.json"
- "Dockerfile.assets"
- "vendor/assets/**/*"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,rubocop_manual_todo}.yml"
- "*_VERSION"
@@ -273,11 +277,11 @@
- "{,ee/,jh/}{app,bin,config,db,generator_templates,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
- "data/whats_new/*.yml"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
.qa-patterns: &qa-patterns
- ".dockerignore"
- "qa/**/*"
# .code-patterns + .backstage-patterns
.code-backstage-patterns: &code-backstage-patterns
- "{package.json,yarn.lock}"
- ".browserslistrc"
@@ -286,9 +290,6 @@
- ".csscomb.json"
- "Dockerfile.assets"
- "vendor/assets/**/*"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,rubocop_manual_todo}.yml"
- "*_VERSION"
@@ -299,6 +300,9 @@
- "{,ee/,jh/}{app,bin,config,db,generator_templates,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
- "data/whats_new/*.yml"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
# Backstage changes
- "Dangerfile"
- "danger/**/*"
@@ -307,6 +311,7 @@
- "{,ee/,jh/}spec/**/*"
- "{,spec/}tooling/**/*"
# .code-patterns + .qa-patterns
.code-qa-patterns: &code-qa-patterns
- "{package.json,yarn.lock}"
- ".browserslistrc"
@@ -315,9 +320,6 @@
- ".csscomb.json"
- "Dockerfile.assets"
- "vendor/assets/**/*"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,rubocop_manual_todo}.yml"
- "*_VERSION"
@@ -328,10 +330,14 @@
- "{,ee/,jh/}{app,bin,config,db,generator_templates,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
- "data/whats_new/*.yml"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
# QA changes
- ".dockerignore"
- "qa/**/*"
# .code-patterns + .backstage-patterns + .qa-patterns
.code-backstage-qa-patterns: &code-backstage-qa-patterns
- "{package.json,yarn.lock}"
- ".browserslistrc"
@@ -340,9 +346,6 @@
- ".csscomb.json"
- "Dockerfile.assets"
- "vendor/assets/**/*"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,rubocop_manual_todo}.yml"
- "*_VERSION"
@@ -353,6 +356,9 @@
- "{,ee/,jh/}{app,bin,config,db,generator_templates,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
- "data/whats_new/*.yml"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
# Backstage changes
- "Dangerfile"
- "danger/**/*"
@@ -364,8 +370,44 @@
- ".dockerignore"
- "qa/**/*"
.code-backstage-danger-patterns: &code-backstage-danger-patterns
# .code-backstage-qa-patterns + .workhorse-patterns
.setup-test-env-patterns: &setup-test-env-patterns
- "{package.json,yarn.lock}"
- ".browserslistrc"
- "babel.config.js"
- "jest.config.{base,integration,unit}.js"
- ".csscomb.json"
- "Dockerfile.assets"
- "vendor/assets/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,rubocop_manual_todo}.yml"
- "*_VERSION"
- "{,jh/}Gemfile{,.lock}"
- "Rakefile"
- "tests.yml"
- "config.ru"
- "{,ee/,jh/}{app,bin,config,db,generator_templates,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
- "data/whats_new/*.yml"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
# Backstage changes
- "Dangerfile"
- "danger/**/*"
- "{,ee/,jh/}fixtures/**/*"
- "{,ee/,jh/}rubocop/**/*"
- "{,ee/,jh/}spec/**/*"
- "{,spec/}tooling/**/*"
# QA changes
- ".dockerignore"
- "qa/**/*"
# Workhorse changes
- "GITLAB_WORKHORSE_VERSION"
- "workhorse/**/*"
- ".gitlab/ci/workhorse.gitlab-ci.yml"
.danger-patterns: &danger-patterns
- "Dangerfile"
- "danger/**/*"
- "tooling/danger/**/*"
@@ -713,6 +755,11 @@
###############
# Rails rules #
###############
.rails:rules:setup-test-env:
rules:
- changes: *setup-test-env-patterns
- <<: *if-merge-request-labels-run-all-rspec
.rails:rules:decomposed-databases:
rules:
- <<: *if-merge-request-labels-run-decomposed
@@ -1632,7 +1679,7 @@
.review:rules:danger-local:
rules:
- if: '$CI_MERGE_REQUEST_IID'
changes: *code-backstage-danger-patterns
changes: *danger-patterns
###############
# Setup rules #

View File

@@ -1 +1 @@
6857897d8b352850ed2c3f5c9396c782a930d052
79a0dfb018671957bbc1a02e21684c8cc2858160

View File

@@ -0,0 +1,61 @@
<script>
import { GlPopover, GlLink, GlIcon } from '@gitlab/ui';
export default {
name: 'MetricPopover',
components: {
GlPopover,
GlLink,
GlIcon,
},
props: {
metric: {
type: Object,
required: true,
},
target: {
type: String,
required: true,
},
},
computed: {
metricLinks() {
return this.metric.links?.filter((link) => !link.docs_link) || [];
},
docsLink() {
return this.metric.links?.find((link) => link.docs_link);
},
},
};
</script>
<template>
<gl-popover :target="target" placement="bottom">
<template #title>
<span class="gl-display-block gl-text-left" data-testid="metric-label">{{
metric.label
}}</span>
</template>
<div
v-for="(link, idx) in metricLinks"
:key="`link-${idx}`"
class="gl-display-flex gl-justify-content-space-between gl-text-right gl-py-1"
data-testid="metric-link"
>
<span>{{ link.label }}</span>
<gl-link :href="link.url" class="gl-font-sm">
{{ link.name }}
</gl-link>
</div>
<span v-if="metric.description" data-testid="metric-description">{{ metric.description }}</span>
<gl-link
v-if="docsLink"
:href="docsLink.url"
class="gl-font-sm"
target="_blank"
data-testid="metric-docs-link"
>{{ docsLink.label }}
<gl-icon name="external-link" class="gl-vertical-align-middle" />
</gl-link>
</gl-popover>
</template>

View File

@@ -1,11 +1,13 @@
<script>
import { GlDeprecatedSkeletonLoading as GlSkeletonLoading, GlPopover } from '@gitlab/ui';
import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
import { GlSingleStat } from '@gitlab/ui/dist/charts';
import { flatten } from 'lodash';
import createFlash from '~/flash';
import { sprintf, s__ } from '~/locale';
import { redirectTo } from '~/lib/utils/url_utility';
import { METRICS_POPOVER_CONTENT } from '../constants';
import { removeFlash, prepareTimeMetricsData } from '../utils';
import MetricPopover from './metric_popover.vue';
const requestData = ({ request, endpoint, path, params, name }) => {
return request({ endpoint, params, requestPath: path })
@@ -31,9 +33,9 @@ const fetchMetricsData = (reqs = [], path, params) => {
export default {
name: 'ValueStreamMetrics',
components: {
GlPopover,
GlSingleStat,
GlSkeletonLoading,
MetricPopover,
},
props: {
requestPath: {
@@ -76,6 +78,14 @@ export default {
this.isLoading = false;
});
},
hasLinks(links) {
return links?.length && links[0].url;
},
clickHandler({ links }) {
if (this.hasLinks(links)) {
redirectTo(links[0].url);
}
},
},
};
</script>
@@ -93,14 +103,11 @@ export default {
:unit="metric.unit || ''"
:should-animate="true"
:animation-decimal-places="1"
:class="{ 'gl-hover-cursor-pointer': hasLinks(metric.links) }"
tabindex="0"
@click="clickHandler(metric)"
/>
<gl-popover :target="metric.key" placement="bottom">
<template #title>
<span class="gl-display-block gl-text-left">{{ metric.label }}</span>
</template>
<span v-if="metric.description">{{ metric.description }}</span>
</gl-popover>
<metric-popover :metric="metric" :target="metric.key" />
</div>
</template>
</div>

View File

@@ -23,6 +23,7 @@ import PackageFiles from '~/packages_and_registries/package_registry/components/
import PackageHistory from '~/packages_and_registries/package_registry/components/details/package_history.vue';
import PackageTitle from '~/packages_and_registries/package_registry/components/details/package_title.vue';
import VersionRow from '~/packages_and_registries/package_registry/components/details/version_row.vue';
import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue';
import {
PACKAGE_TYPE_NUGET,
PACKAGE_TYPE_COMPOSER,
@@ -35,12 +36,10 @@ import {
CANCEL_DELETE_PACKAGE_FILE_TRACKING_ACTION,
SHOW_DELETE_SUCCESS_ALERT,
FETCH_PACKAGE_DETAILS_ERROR_MESSAGE,
DELETE_PACKAGE_ERROR_MESSAGE,
DELETE_PACKAGE_FILE_ERROR_MESSAGE,
DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
} from '~/packages_and_registries/package_registry/constants';
import destroyPackageMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package.mutation.graphql';
import destroyPackageFileMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_file.mutation.graphql';
import getPackageDetails from '~/packages_and_registries/package_registry/graphql/queries/get_package_details.query.graphql';
import Tracking from '~/tracking';
@@ -62,6 +61,7 @@ export default {
AdditionalMetadata,
InstallationCommands,
PackageFiles,
DeletePackage,
},
directives: {
GlTooltip: GlTooltipDirective,
@@ -148,40 +148,15 @@ export default {
formatSize(size) {
return numberToHumanSize(size);
},
async deletePackage() {
const { data } = await this.$apollo.mutate({
mutation: destroyPackageMutation,
variables: {
id: this.packageEntity.id,
},
});
navigateToListWithSuccessModal() {
const returnTo =
!this.groupListUrl || document.referrer.includes(this.projectName)
? this.projectListUrl
: this.groupListUrl; // to avoid security issue url are supplied from backend
if (data?.destroyPackage?.errors[0]) {
throw data.destroyPackage.errors[0];
}
},
async confirmPackageDeletion() {
this.track(DELETE_PACKAGE_TRACKING_ACTION);
const modalQuery = objectToQuery({ [SHOW_DELETE_SUCCESS_ALERT]: true });
try {
await this.deletePackage();
const returnTo =
!this.groupListUrl || document.referrer.includes(this.projectName)
? this.projectListUrl
: this.groupListUrl; // to avoid security issue url are supplied from backend
const modalQuery = objectToQuery({ [SHOW_DELETE_SUCCESS_ALERT]: true });
window.location.replace(`${returnTo}?${modalQuery}`);
} catch (error) {
createFlash({
message: DELETE_PACKAGE_ERROR_MESSAGE,
type: 'warning',
captureError: true,
error,
});
}
window.location.replace(`${returnTo}?${modalQuery}`);
},
async deletePackageFile(id) {
try {
@@ -322,26 +297,33 @@ export default {
</gl-tab>
</gl-tabs>
<gl-modal
ref="deleteModal"
modal-id="delete-modal"
data-testid="delete-modal"
:action-primary="$options.modal.packageDeletePrimaryAction"
:action-cancel="$options.modal.cancelAction"
@primary="confirmPackageDeletion"
@canceled="track($options.trackingActions.CANCEL_DELETE_PACKAGE)"
<delete-package
@start="track($options.trackingActions.DELETE_PACKAGE_TRACKING_ACTION)"
@end="navigateToListWithSuccessModal"
>
<template #modal-title>{{ $options.i18n.deleteModalTitle }}</template>
<gl-sprintf :message="$options.i18n.deleteModalContent">
<template #version>
<strong>{{ packageEntity.version }}</strong>
</template>
<template #default="{ deletePackage }">
<gl-modal
ref="deleteModal"
modal-id="delete-modal"
data-testid="delete-modal"
:action-primary="$options.modal.packageDeletePrimaryAction"
:action-cancel="$options.modal.cancelAction"
@primary="deletePackage(packageEntity)"
@canceled="track($options.trackingActions.CANCEL_DELETE_PACKAGE)"
>
<template #modal-title>{{ $options.i18n.deleteModalTitle }}</template>
<gl-sprintf :message="$options.i18n.deleteModalContent">
<template #version>
<strong>{{ packageEntity.version }}</strong>
</template>
<template #name>
<strong>{{ packageEntity.name }}</strong>
</template>
</gl-sprintf>
</gl-modal>
<template #name>
<strong>{{ packageEntity.name }}</strong>
</template>
</gl-sprintf>
</gl-modal>
</template>
</delete-package>
<gl-modal
ref="deleteFileModal"

View File

@@ -0,0 +1,60 @@
<script>
import destroyPackageMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package.mutation.graphql';
import createFlash from '~/flash';
import { s__ } from '~/locale';
export default {
props: {
refetchQueries: {
type: Array,
required: false,
default: null,
},
showSuccessAlert: {
type: Boolean,
required: false,
default: false,
},
},
i18n: {
errorMessage: s__('PackageRegistry|Something went wrong while deleting the package.'),
successMessage: s__('PackageRegistry|Package deleted successfully'),
},
methods: {
async deletePackage(packageEntity) {
try {
this.$emit('start');
const { data } = await this.$apollo.mutate({
mutation: destroyPackageMutation,
variables: {
id: packageEntity.id,
},
awaitRefetchQueries: Boolean(this.refetchQueries),
refetchQueries: this.refetchQueries,
});
if (data?.destroyPackage?.errors[0]) {
throw data.destroyPackage.errors[0];
}
if (this.showSuccessAlert) {
createFlash({
message: this.$options.i18n.successMessage,
type: 'success',
});
}
} catch (error) {
createFlash({
message: this.$options.i18n.errorMessage,
type: 'warning',
captureError: true,
error,
});
}
this.$emit('end');
},
},
render() {
return this.$scopedSlots.default({ deletePackage: this.deletePackage });
},
};
</script>

View File

@@ -1,22 +1,19 @@
<script>
/*
* The following component has several commented lines, this is because we are refactoring them piece by piece on several mrs
* For a complete overview of the plan please check: https://gitlab.com/gitlab-org/gitlab/-/issues/330846
* This work is behind feature flag: https://gitlab.com/gitlab-org/gitlab/-/issues/341136
*/
import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
import createFlash from '~/flash';
import { historyReplaceState } from '~/lib/utils/common_utils';
import { s__ } from '~/locale';
import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages/list/constants';
import { SHOW_DELETE_SUCCESS_ALERT } from '~/packages/shared/constants';
import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql';
import {
PROJECT_RESOURCE_TYPE,
GROUP_RESOURCE_TYPE,
LIST_QUERY_DEBOUNCE_TIME,
GRAPHQL_PAGE_SIZE,
} from '~/packages_and_registries/package_registry/constants';
import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql';
import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue';
import PackageTitle from './package_title.vue';
import PackageSearch from './package_search.vue';
import PackageList from './packages_list.vue';
@@ -29,6 +26,7 @@ export default {
PackageList,
PackageTitle,
PackageSearch,
DeletePackage,
},
inject: [
'packageHelpUrl',
@@ -42,6 +40,7 @@
packages: {},
sort: '',
filters: {},
mutationLoading: false,
};
},
apollo: {
@@ -88,6 +87,17 @@
? this.$options.i18n.emptyPageTitle
: this.$options.i18n.noResultsTitle;
},
isLoading() {
return this.$apollo.queries.packages.loading || this.mutationLoading;
},
refetchQueriesData() {
return [
{
query: getPackagesQuery,
variables: this.queryVariables,
},
];
},
},
mounted() {
this.checkDeleteAlert();
@@ -153,25 +163,35 @@
<package-title :help-url="packageHelpUrl" :count="packagesCount" />
<package-search @update="handleSearchUpdate" />
<package-list
:list="packages.nodes"
:is-loading="$apollo.queries.packages.loading"
:page-info="pageInfo"
@prev-page="fetchPreviousPage"
@next-page="fetchNextPage"
<delete-package
:refetch-queries="refetchQueriesData"
show-success-alert
@start="mutationLoading = true"
@end="mutationLoading = false"
>
<template #empty-state>
<gl-empty-state :title="emptyStateTitle" :svg-path="emptyListIllustration">
<template #description>
<gl-sprintf v-if="hasFilters" :message="$options.i18n.widenFilters" />
<gl-sprintf v-else :message="$options.i18n.noResultsText">
<template #noPackagesLink="{ content }">
<gl-link :href="emptyListHelpUrl" target="_blank">{{ content }}</gl-link>
<template #default="{ deletePackage }">
<package-list
:list="packages.nodes"
:is-loading="isLoading"
:page-info="pageInfo"
@prev-page="fetchPreviousPage"
@next-page="fetchNextPage"
@package:delete="deletePackage"
>
<template #empty-state>
<gl-empty-state :title="emptyStateTitle" :svg-path="emptyListIllustration">
<template #description>
<gl-sprintf v-if="hasFilters" :message="$options.i18n.widenFilters" />
<gl-sprintf v-else :message="$options.i18n.noResultsText">
<template #noPackagesLink="{ content }">
<gl-link :href="emptyListHelpUrl" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</template>
</gl-sprintf>
</gl-empty-state>
</template>
</gl-empty-state>
</package-list>
</template>
</package-list>
</delete-package>
</div>
</template>

View File

@@ -60,21 +60,28 @@ export default {
showPagination() {
return this.pageInfo.hasPreviousPage || this.pageInfo.hasNextPage;
},
showDeleteModal: {
get() {
return Boolean(this.itemToBeDeleted);
},
set(value) {
if (!value) {
this.itemToBeDeleted = null;
}
},
},
},
methods: {
setItemToBeDeleted(item) {
this.itemToBeDeleted = { ...item };
this.track(REQUEST_DELETE_PACKAGE_TRACKING_ACTION);
this.$refs.packageListDeleteModal.show();
},
deleteItemConfirmation() {
this.$emit('package:delete', this.itemToBeDeleted);
this.track(DELETE_PACKAGE_TRACKING_ACTION);
this.itemToBeDeleted = null;
},
deleteItemCanceled() {
this.track(CANCEL_DELETE_PACKAGE_TRACKING_ACTION);
this.itemToBeDeleted = null;
},
},
i18n: {
@@ -115,7 +122,7 @@ export default {
</div>
<gl-modal
ref="packageListDeleteModal"
v-model="showDeleteModal"
modal-id="confirm-delete-pacakge"
ok-variant="danger"
@ok="deleteItemConfirmation"

View File

@@ -60,9 +60,6 @@ export const TRACKING_ACTION_COPY_COMPOSER_PACKAGE_INCLUDE_COMMAND =
'copy_composer_package_include_command';
export const SHOW_DELETE_SUCCESS_ALERT = 'showSuccessDeleteAlert';
export const DELETE_PACKAGE_ERROR_MESSAGE = s__(
'PackageRegistry|Something went wrong while deleting the package.',
);
export const DELETE_PACKAGE_FILE_ERROR_MESSAGE = s__(
'PackageRegistry|Something went wrong while deleting the package file.',
);

View File

@@ -133,9 +133,9 @@ class Projects::JobsController < Projects::ApplicationController
end
def raw
if trace_artifact_file
if @build.trace.archived_trace_exist?
workhorse_set_content_type!
send_upload(trace_artifact_file,
send_upload(@build.job_artifacts_trace.file,
send_params: raw_send_params,
redirect_params: raw_redirect_params)
else
@@ -219,10 +219,6 @@ class Projects::JobsController < Projects::ApplicationController
params.permit(job_variables_attributes: %i[key secret_value])
end
def trace_artifact_file
@trace_artifact_file ||= @build.job_artifacts_trace&.file
end
def find_job_as_build
@build = project.builds.find(params[:id])
end

View File

@@ -10,6 +10,7 @@ module Ci
include Artifactable
include FileStoreMounter
include EachBatch
include Gitlab::Utils::StrongMemoize
TEST_REPORT_FILE_TYPES = %w[junit].freeze
COVERAGE_REPORT_FILE_TYPES = %w[cobertura].freeze
@@ -121,6 +122,9 @@ module Ci
mount_file_store_uploader JobArtifactUploader
skip_callback :save, :after, :store_file!, if: :store_after_commit?
after_commit :store_file_after_commit!, on: [:create, :update], if: :store_after_commit?
validates :file_format, presence: true, unless: :trace?, on: :create
validate :validate_file_format!, unless: :trace?, on: :create
before_save :set_size, if: :file_changed?
@@ -335,8 +339,23 @@ module Ci
}
end
def store_after_commit?
strong_memoize(:store_after_commit) do
trace? &&
JobArtifactUploader.direct_upload_enabled? &&
Feature.enabled?(:ci_store_trace_outside_transaction, project, default_enabled: :yaml)
end
end
private
def store_file_after_commit!
return unless previous_changes.key?(:file)
store_file!
update_file_store
end
def set_size
self.size = file.size
end
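The hunk above is the heart of this change: CarrierWave's synchronous `store_file!` save callback is skipped for traces and replayed after commit. A minimal sketch of the pattern on a hypothetical model — `Document`, `DocumentUploader`, and `defer_store?` are illustrative stand-ins; the callback names mirror the diff:

```ruby
class Document < ApplicationRecord
  mount_uploader :file, DocumentUploader # CarrierWave registers an after_save :store_file! callback

  # Skip the synchronous upload inside the DB transaction...
  skip_callback :save, :after, :store_file!, if: :defer_store?
  # ...and replay it once the transaction has committed.
  after_commit :store_file_after_commit!, on: [:create, :update], if: :defer_store?

  private

  def defer_store?
    true # stands in for the direct-upload and feature-flag checks in the real model
  end

  def store_file_after_commit!
    return unless previous_changes.key?(:file)

    store_file!                                   # CarrierWave upload, now outside the transaction
    update_column(:file_store, file.object_store) # object_store is set during store!
  end
end
```

Deferring the upload keeps slow object-storage writes out of the database transaction, at the cost of having to persist `file_store` manually once the file lands — which is exactly what the `FileStoreMounter` change below accounts for.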

View File

@@ -7,15 +7,13 @@ module FileStoreMounter
def mount_file_store_uploader(uploader)
mount_uploader(:file, uploader)
# This hook is a no-op when the file is uploaded after_commit
after_save :update_file_store, if: :saved_change_to_file?
end
end
private
def update_file_store
# The file.object_store is set during `uploader.store!`
# which happens after object is inserted/updated
self.update_column(:file_store, file.object_store)
# The file.object_store is set during `uploader.store!` and `uploader.migrate!`
update_column(:file_store, file.object_store)
end
end

View File

@@ -4,6 +4,7 @@ class AnalyticsSummaryEntity < Grape::Entity
expose :value, safe: true
expose :title
expose :unit, if: { with_unit: true }
expose :links
private
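`expose :unit, if: { with_unit: true }` uses Grape::Entity's hash condition, which is matched against the options supplied at serialization time; the new `links` field is exposed unconditionally. A small usage sketch (the entity and struct are illustrative):

```ruby
class WidgetEntity < Grape::Entity
  expose :title
  expose :unit, if: { with_unit: true } # only when serialized with with_unit: true
  expose :links
end

widget = Struct.new(:title, :unit, :links).new('Deploys', 'per day', [])

WidgetEntity.new(widget).as_json                  # => { title: "Deploys", links: [] }
WidgetEntity.new(widget, with_unit: true).as_json # => adds unit: "per day"
```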

View File

@@ -19,6 +19,7 @@ module Ci
def initialize(job)
@job = job
@project = job.project
@pipeline = job.pipeline if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, @project)
end
def authorize(artifact_type:, filesize: nil)
@@ -53,7 +54,7 @@ module Ci
private
attr_reader :job, :project
attr_reader :job, :project, :pipeline
def validate_requirements(artifact_type:, filesize:)
return too_large_error if too_large?(artifact_type, filesize)
@@ -85,24 +86,32 @@ module Ci
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
artifact = Ci::JobArtifact.new(
artifact_attributes = {
job_id: job.id,
project: project,
file: artifacts_file,
file_type: params[:artifact_type],
file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
expire_in: expire_in
}
artifact_attributes[:locked] = pipeline.locked if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, project)
artifact = Ci::JobArtifact.new(
artifact_attributes.merge(
file: artifacts_file,
file_type: params[:artifact_type],
file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256
)
)
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
job_id: job.id,
project: project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
expire_in: expire_in)
artifact_attributes.merge(
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256
)
)
end
[artifact, artifact_metadata]

View File

@@ -5,22 +5,84 @@ module Ci
BATCH_SIZE = 100
def execute(ci_ref, before_pipeline = nil)
query = <<~SQL.squish
UPDATE "ci_pipelines"
SET "locked" = #{::Ci::Pipeline.lockeds[:unlocked]}
WHERE "ci_pipelines"."id" in (
#{collect_pipelines(ci_ref, before_pipeline).select(:id).to_sql}
LIMIT #{BATCH_SIZE}
FOR UPDATE SKIP LOCKED
)
RETURNING "ci_pipelines"."id";
SQL
results = {
unlocked_pipelines: 0,
unlocked_job_artifacts: 0
}
loop do
break if Ci::Pipeline.connection.exec_query(query).empty?
if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, ci_ref.project)
loop do
unlocked_pipelines = []
unlocked_job_artifacts = []
::Ci::Pipeline.transaction do
unlocked_pipelines = unlock_pipelines(ci_ref, before_pipeline)
unlocked_job_artifacts = unlock_job_artifacts(unlocked_pipelines)
end
break if unlocked_pipelines.empty?
results[:unlocked_pipelines] += unlocked_pipelines.length
results[:unlocked_job_artifacts] += unlocked_job_artifacts.length
end
else
query = <<~SQL.squish
UPDATE "ci_pipelines"
SET "locked" = #{::Ci::Pipeline.lockeds[:unlocked]}
WHERE "ci_pipelines"."id" in (
#{collect_pipelines(ci_ref, before_pipeline).select(:id).to_sql}
LIMIT #{BATCH_SIZE}
FOR UPDATE SKIP LOCKED
)
RETURNING "ci_pipelines"."id";
SQL
loop do
unlocked_pipelines = Ci::Pipeline.connection.exec_query(query)
break if unlocked_pipelines.empty?
results[:unlocked_pipelines] += unlocked_pipelines.length
end
end
results
end
# rubocop:disable CodeReuse/ActiveRecord
def unlock_job_artifacts_query(pipeline_ids)
ci_job_artifacts = ::Ci::JobArtifact.arel_table
build_ids = ::Ci::Build.select(:id).where(commit_id: pipeline_ids)
returning = Arel::Nodes::Grouping.new(ci_job_artifacts[:id])
Arel::UpdateManager.new
.table(ci_job_artifacts)
.where(ci_job_artifacts[:job_id].in(Arel.sql(build_ids.to_sql)))
.set([[ci_job_artifacts[:locked], ::Ci::JobArtifact.lockeds[:unlocked]]])
.to_sql + " RETURNING #{returning.to_sql}"
end
# rubocop:enable CodeReuse/ActiveRecord
# rubocop:disable CodeReuse/ActiveRecord
def unlock_pipelines_query(ci_ref, before_pipeline)
ci_pipelines = ::Ci::Pipeline.arel_table
pipelines_scope = ci_ref.pipelines.artifacts_locked
pipelines_scope = pipelines_scope.before_pipeline(before_pipeline) if before_pipeline
pipelines_scope = pipelines_scope.select(:id).limit(BATCH_SIZE).lock('FOR UPDATE SKIP LOCKED')
returning = Arel::Nodes::Grouping.new(ci_pipelines[:id])
Arel::UpdateManager.new
.table(ci_pipelines)
.where(ci_pipelines[:id].in(Arel.sql(pipelines_scope.to_sql)))
.set([[ci_pipelines[:locked], ::Ci::Pipeline.lockeds[:unlocked]]])
.to_sql + " RETURNING #{returning.to_sql}"
end
# rubocop:enable CodeReuse/ActiveRecord
private
def collect_pipelines(ci_ref, before_pipeline)
@@ -29,5 +91,17 @@ module Ci
pipeline_scope.artifacts_locked
end
def unlock_job_artifacts(pipelines)
return if pipelines.empty?
::Ci::JobArtifact.connection.exec_query(
unlock_job_artifacts_query(pipelines.rows.flatten)
)
end
def unlock_pipelines(ci_ref, before_pipeline)
::Ci::Pipeline.connection.exec_query(unlock_pipelines_query(ci_ref, before_pipeline))
end
end
end
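Both query builders follow the same shape: an Arel `UPDATE` constrained to a `FOR UPDATE SKIP LOCKED` subquery, with a `RETURNING` clause appended so the caller can count exactly which rows were unlocked. A condensed sketch of that pattern against a hypothetical `widgets` table:

```ruby
class Widget < ActiveRecord::Base; end # hypothetical model/table

widgets = Widget.arel_table

# Bounded batch that takes row locks and skips rows other workers already hold.
batch = Widget.where(locked: true).select(:id).limit(100).lock('FOR UPDATE SKIP LOCKED')

sql = Arel::UpdateManager.new
        .table(widgets)
        .where(widgets[:id].in(Arel.sql(batch.to_sql)))
        .set([[widgets[:locked], false]])
        .to_sql + " RETURNING #{Arel::Nodes::Grouping.new(widgets[:id]).to_sql}"

unlocked = Widget.connection.exec_query(sql)
unlocked.rows.flatten # => ids of the rows this batch actually updated
```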

View File

@@ -36,17 +36,9 @@ module Projects
private
def project_members_through_invited_groups
groups_with_ancestors = if ::Feature.enabled?(:linear_participants_service_ancestor_scopes, current_user, default_enabled: :yaml)
visible_groups.self_and_ancestors
else
Gitlab::ObjectHierarchy
.new(visible_groups)
.base_and_ancestors
end
GroupMember
.active_without_invites_and_requests
.with_source_id(groups_with_ancestors.pluck_primary_key)
.with_source_id(visible_groups.self_and_ancestors.pluck_primary_key)
end
def visible_groups

View File

@@ -1,31 +1,31 @@
- breadcrumb_title _('Kubernetes')
- page_title _('Kubernetes Cluster')
- active_tab = local_assigns.fetch(:active_tab, 'create')
- provider = params[:provider]
- active_tab = params[:tab] || local_assigns.fetch(:active_tab, 'create')
- is_active_tab_create = active_tab === 'create'
- is_active_tab_add = active_tab === 'add'
= render_gcp_signup_offer
.row.gl-mt-3
.col-md-3
= render 'sidebar'
.col-md-9.js-toggle-container
%ul.nav-links.nav-tabs.gitlab-tabs.nav{ role: 'tablist' }
%li.nav-item{ role: 'presentation' }
%a.nav-link{ href: '#create-cluster-pane', id: 'create-cluster-tab', class: active_when(active_tab == 'create'), data: { toggle: 'tab' }, role: 'tab' }
%span
= create_new_cluster_label(provider: params[:provider])
%li.nav-item{ role: 'presentation' }
%a.nav-link{ href: '#add-cluster-pane', id: 'add-cluster-tab', class: active_when(active_tab == 'add'), data: { toggle: 'tab', qa_selector: 'add_existing_cluster_tab' }, role: 'tab' }
%span= s_('ClusterIntegration|Connect existing cluster')
.col-md-9
= gl_tabs_nav({ class: 'nav-justified' }) do
= gl_tab_link_to clusterable.new_path(tab: 'create'), { item_active: is_active_tab_create } do
%span= create_new_cluster_label(provider: params[:provider])
= gl_tab_link_to s_('ClusterIntegration|Connect existing cluster'), clusterable.new_path(tab: 'add'), { item_active: is_active_tab_add, qa_selector: 'add_existing_cluster_tab' }
.tab-content.gitlab-tab-content
.tab-pane.p-0{ id: 'create-cluster-pane', class: active_when(active_tab == 'create'), role: 'tabpanel' }
= render 'clusters/clusters/cloud_providers/cloud_provider_selector'
.tab-content
- if is_active_tab_create
.tab-pane.active{ role: 'tabpanel' }
= render 'clusters/clusters/cloud_providers/cloud_provider_selector'
- if ['aws', 'gcp'].include?(provider)
.p-3.border-top
= render "clusters/clusters/#{provider}/new"
- if ['aws', 'gcp'].include?(provider)
.p-3.border-top
= render "clusters/clusters/#{provider}/new"
.tab-pane{ id: 'add-cluster-pane', class: active_when(active_tab == 'add'), role: 'tabpanel' }
#js-cluster-new{ data: js_cluster_new }
= render 'clusters/clusters/user/form'
- if is_active_tab_add
.tab-pane.active.gl-p-5{ role: 'tabpanel' }
#js-cluster-new{ data: js_cluster_new }
= render 'clusters/clusters/user/form'

View File

@@ -354,6 +354,15 @@
:weight: 1
:idempotent:
:tags: []
- :name: cronjob:issues_reschedule_stuck_issue_rebalances
:worker_name: Issues::RescheduleStuckIssueRebalancesWorker
:feature_category: :team_planning
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:jira_import_stuck_jira_import_jobs
:worker_name: Gitlab::JiraImport::StuckJiraImportJobsWorker
:feature_category: :importers

View File

@@ -15,9 +15,12 @@ module Ci
::Ci::Pipeline.find_by_id(pipeline_id).try do |pipeline|
break unless pipeline.has_archive_artifacts?
::Ci::UnlockArtifactsService
results = ::Ci::UnlockArtifactsService
.new(pipeline.project, pipeline.user)
.execute(pipeline.ci_ref, pipeline)
log_extra_metadata_on_done(:unlocked_pipelines, results[:unlocked_pipelines])
log_extra_metadata_on_done(:unlocked_job_artifacts, results[:unlocked_job_artifacts])
end
end
end

View File

@@ -15,9 +15,12 @@ module Ci
::Project.find_by_id(project_id).try do |project|
::User.find_by_id(user_id).try do |user|
project.ci_refs.find_by_ref_path(ref_path).try do |ci_ref|
::Ci::UnlockArtifactsService
results = ::Ci::UnlockArtifactsService
.new(project, user)
.execute(ci_ref)
log_extra_metadata_on_done(:unlocked_pipelines, results[:unlocked_pipelines])
log_extra_metadata_on_done(:unlocked_job_artifacts, results[:unlocked_job_artifacts])
end
end
end

View File

@@ -118,7 +118,9 @@ class EmailReceiverWorker # rubocop:disable Scalability/IdempotentWorker
end
if reason
EmailRejectionMailer.rejection(reason, raw, can_retry).deliver_later
receiver.mail.body = nil
EmailRejectionMailer.rejection(reason, receiver.mail.encoded, can_retry).deliver_later
end
end
end
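Instead of echoing the raw payload back in the rejection email, the worker now re-encodes the parsed message with its body cleared. A minimal sketch of the Mail-gem behaviour this relies on:

```ruby
require 'mail'

raw  = "Subject: hello\r\n\r\nsensitive body content"
mail = Mail::Message.new(raw) # parse the raw message, as the receiver does

mail.body = nil # drop the body; headers are kept
mail.encoded    # => re-serialized message, safe to quote back to the sender
```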

View File

@@ -19,6 +19,7 @@ class IssueRebalancingWorker
# we need to have exactly one of the project_id and root_namespace_id params be non-nil
raise ArgumentError, "Expected only one of the params project_id: #{project_id} and root_namespace_id: #{root_namespace_id}" if project_id && root_namespace_id
return if project_id.nil? && root_namespace_id.nil?
return if ::Gitlab::Issues::Rebalancing::State.rebalance_recently_finished?(project_id, root_namespace_id)
# pull the projects collection to be rebalanced either the project if namespace is not a group(i.e. user namesapce)
# or the root namespace, this also makes the worker backward compatible with previous version where a project_id was

View File

@@ -0,0 +1,36 @@
# frozen_string_literal: true
module Issues
class RescheduleStuckIssueRebalancesWorker
include ApplicationWorker
include CronjobQueue
data_consistency :sticky
idempotent!
urgency :low
feature_category :team_planning
deduplicate :until_executed, including_scheduled: true
def perform
namespace_ids, project_ids = ::Gitlab::Issues::Rebalancing::State.fetch_rebalancing_groups_and_projects
return if namespace_ids.blank? && project_ids.blank?
namespaces = Namespace.id_in(namespace_ids)
projects = Project.id_in(project_ids)
IssueRebalancingWorker.bulk_perform_async_with_contexts(
namespaces,
arguments_proc: -> (namespace) { [nil, nil, namespace.id] },
context_proc: -> (namespace) { { namespace: namespace } }
)
IssueRebalancingWorker.bulk_perform_async_with_contexts(
projects,
arguments_proc: -> (project) { [nil, project.id, nil] },
context_proc: -> (project) { { project: project } }
)
end
end
end

View File

@@ -0,0 +1,8 @@
---
name: ci_store_trace_outside_transaction
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66203
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/336280
milestone: '15.4'
type: development
group: group::pipeline execution
default_enabled: false

View File

@@ -1,8 +1,8 @@
---
name: linear_participants_service_ancestor_scopes
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70684
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341348
milestone: '14.4'
name: ci_update_unlocked_job_artifacts
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70235
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/343465
milestone: '14.5'
type: development
group: group::access
group: group::testing
default_enabled: false

View File

@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341809
milestone: '14.4'
type: development
group: group::code review
default_enabled: false
default_enabled: true

View File

@@ -588,6 +588,9 @@ Settings.cron_jobs['ci_delete_unit_tests_worker']['job_class'] = 'Ci::DeleteUnit
Settings.cron_jobs['batched_background_migrations_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['batched_background_migrations_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['batched_background_migrations_worker']['job_class'] = 'Database::BatchedBackgroundMigrationWorker'
Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances'] ||= Settingslogic.new({})
Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances']['cron'] ||= '* 0/15 * * *'
Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances']['job_class'] = 'Issues::RescheduleStuckIssueRebalancesWorker'
Gitlab.ee do
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= Settingslogic.new({})

View File

@@ -52,7 +52,7 @@ be higher than those defined above.
For example: for the web-service, we want at least 99.8% of requests
to be faster than their target duration.
These are the targets we use for alerting and service montoring. So
These are the targets we use for alerting and service monitoring. So
durations should be set keeping those into account. So we would not
cause alerts. But the goal would be to set the urgency to a target
that users would be satisfied with.
@@ -63,7 +63,7 @@ error budget for stage groups.
## Adjusting request urgency
Not all endpoints perform the same type of work, so it is possible to
define different urgencies for different endpoints. An endpoint with a
define different urgency levels for different endpoints. An endpoint with a
lower urgency can have a longer request duration than endpoints that
are high urgency.
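For example, a controller can declare a lower urgency for individual actions (a sketch; the class-level `urgency` helper is the controller-side counterpart of the Grape `urgency:` option shown at the end of this page):

```ruby
class Projects::RawController < ApplicationController
  # Declare a lower target for just this action; other actions keep the default.
  urgency :low, [:show]
end
```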
@@ -90,7 +90,7 @@ a case-by-case basis. Please take the following into account:
1. The workload for some endpoints can sometimes differ greatly
depending on the parameters specified by the caller. The urgency
needs to accomodate that. In some cases, it might be interesting to
needs to accommodate that. In some cases, it might be interesting to
define a separate [application SLI](index.md#defining-a-new-sli)
for what the endpoint is doing.
@@ -99,7 +99,7 @@ a case-by-case basis. Please take the following into account:
target. For example, if the `MergeRequests::DraftsController` is
hit for every merge request being viewed, but doesn't need to
render anything in most cases, then we should pick the target that
would still accomodate the endpoint performing work.
would still accommodate the endpoint performing work.
1. Consider the dependent resources consumed by the endpoint. If the endpoint
loads a lot of data from Gitaly or the database and this is causing
@@ -117,10 +117,10 @@ a case-by-case basis. Please take the following into account:
should try to keep as short as possible.
1. Traffic characteristics should also be taken into account: if the
trafic to the endpoint is bursty, like CI traffic spinning up a
traffic to the endpoint is bursty, like CI traffic spinning up a
big batch of jobs hitting the same endpoint, then having these
endpoints take 5s is not acceptable from an infrastructure point of
view. We cannot scale up the fleet fast enough to accomodate for
view. We cannot scale up the fleet fast enough to accommodate for
the incoming slow requests alongside the regular traffic.
When lowering the urgency for an existing endpoint, please involve a
@@ -146,14 +146,14 @@ information in the logs to determine this:
1. The table loads information for the busiest endpoints by
default. You can speed things up by adding a filter for
`json.caller_id.keyword` and adding the identifier you're intersted
`json.caller_id.keyword` and adding the identifier you're interested
in (for example: `Projects::RawController#show`).
1. Check the [appropriate percentile duration](#request-apdex-slo) for
the service the endpoint is handled by. The overall duration should
be lower than the target you intend to set.
1. Assess if the overall duration is below the intended target. Please also
1. If the overall duration is below the intended target. Please also
check the peaks over time in [this
graph](https://log.gprd.gitlab.net/goto/9319c4a402461d204d13f3a4924a89fc)
in Kibana. Here, the percentile in question should not peak above
@@ -232,3 +232,23 @@ get 'client/features', urgency: :low do
# endpoint logic
end
```
### Error budget attribution and ownership
This SLI is used for service level monitoring. It feeds into the
[error budget for stage groups](../stage_group_dashboards.md#error-budget) when
opting in. For more information, read the epic for
[defining custom SLIs and incorporating them into error budgets](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/525).
The endpoints for the SLI feed into a group's error budget based on the
[feature category declared on it](../feature_categorization/index.md).
To know which endpoints are included for your group, you can see the
request rates on the
[group dashboard for your group](https://dashboards.gitlab.net/dashboards/f/stage-groups/stage-groups).
In the **Budget Attribution** row, the **Puma apdex** log link shows you
how many requests are not meeting a 1s or 5s target.
Learn more about the content of the dashboard in the documentation for
[Dashboards for stage groups](../stage_group_dashboards.md). For more information
on our exploration of the error budget itself, read the infrastructure issue
[Stage group error budget exploration dashboard](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1365).

View File

@@ -766,7 +766,7 @@ Payload example:
Merge request events are triggered when:
- A new merge request is created.
- An existing merge request is updated, merged, or closed.
- An existing merge request is updated, approved, unapproved, merged, or closed.
- A commit is added in the source branch.
The available values for `object_attributes.action` in the payload are:

View File

@@ -70,7 +70,7 @@ module API
get ':namespace/exists', requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
namespace_path = params[:namespace]
exists = Namespace.by_parent(params[:parent_id]).filter_by_path(namespace_path).exists?
exists = Namespace.without_project_namespaces.by_parent(params[:parent_id]).filter_by_path(namespace_path).exists?
suggestions = exists ? [Namespace.clean_path(namespace_path)] : []
present :exists, exists

View File

@@ -78,7 +78,7 @@ module Gitlab
end
def archived_trace_exist?
trace_artifact&.exists?
archived?
end
def live_trace_exist?
@@ -156,7 +156,7 @@ module Gitlab
def read_stream
stream = Gitlab::Ci::Trace::Stream.new do
if trace_artifact
if archived?
trace_artifact.open
elsif job.trace_chunks.any?
Gitlab::Ci::Trace::ChunkedIO.new(job)
@@ -174,7 +174,7 @@ module Gitlab
def unsafe_write!(mode, &blk)
stream = Gitlab::Ci::Trace::Stream.new do
if trace_artifact
if archived?
raise AlreadyArchivedError, 'Could not write to the archived trace'
elsif current_path
File.open(current_path, mode)
@@ -195,7 +195,7 @@ module Gitlab
def unsafe_archive!
raise ArchiveError, 'Job is not finished yet' unless job.complete?
already_archived?.tap do |archived|
archived?.tap do |archived|
destroy_any_orphan_trace_data!
raise AlreadyArchivedError, 'Could not archive again' if archived
end
@@ -218,7 +218,7 @@ module Gitlab
end
end
def already_archived?
def archived?
# TODO check checksum to ensure archive completed successfully
# See https://gitlab.com/gitlab-org/gitlab/-/issues/259619
trace_artifact&.archived_trace_exists?
@@ -227,11 +227,12 @@ module Gitlab
def destroy_any_orphan_trace_data!
return unless trace_artifact
if already_archived?
# An archive already exists, so make sure to remove the trace chunks
if archived?
# An archive file exists, so remove the trace chunks
erase_trace_chunks!
else
# An archive already exists, but its associated file does not, so remove it
# A trace artifact record exists with no archive file
# but an archive was attempted, so cleanup the associated record
trace_artifact.destroy!
end
end

View File

@@ -38,7 +38,8 @@ module Gitlab
serialize(
Summary::DeploymentFrequency.new(
deployments: deployments_summary.value.raw_value,
options: @options),
options: @options,
project: @project),
with_unit: true
)
end

View File

@@ -17,6 +17,10 @@ module Gitlab
raise NotImplementedError, "Expected #{self.name} to implement value"
end
def links
[]
end
private
attr_reader :project, :options

View File

@@ -6,7 +6,7 @@ module Gitlab
class DeploymentFrequency < Base
include SummaryHelper
def initialize(deployments:, options:, project: nil)
def initialize(deployments:, options:, project:)
@deployments = deployments
super(project: project, options: options)
@@ -23,6 +23,13 @@ module Gitlab
def unit
_('per day')
end
def links
[
{ "name" => _('Deployment frequency'), "url" => Gitlab::Routing.url_helpers.charts_project_pipelines_path(project, chart: 'deployment-frequency'), "label" => s_('ValueStreamAnalytics|Dashboard') },
{ "name" => _('Deployment frequency'), "url" => Gitlab::Routing.url_helpers.help_page_path('user/analytics/index', anchor: 'definitions'), "docs_link" => true, "label" => s_('ValueStreamAnalytics|Go to docs') }
]
end
end
end
end

View File

@@ -44,6 +44,10 @@ module Gitlab
}
end
def mail
strong_memoize(:mail) { build_mail }
end
private
def handler
@@ -54,10 +58,6 @@ module Gitlab
Handler.for(mail, mail_key)
end
def mail
strong_memoize(:mail) { build_mail }
end
def build_mail
Mail::Message.new(@raw)
rescue Encoding::UndefinedConversionError,

View File

@@ -4,6 +4,10 @@ module Gitlab
module Issues
module Rebalancing
class State
REDIS_KEY_PREFIX = "gitlab:issues-position-rebalances"
CONCURRENT_RUNNING_REBALANCES_KEY = "#{REDIS_KEY_PREFIX}:running_rebalances"
RECENTLY_FINISHED_REBALANCE_PREFIX = "#{REDIS_KEY_PREFIX}:recently_finished"
REDIS_EXPIRY_TIME = 10.days
MAX_NUMBER_OF_CONCURRENT_REBALANCES = 5
NAMESPACE = 1
@@ -21,25 +25,23 @@ module Gitlab
redis.multi do |multi|
# we trigger re-balance for namespaces(groups) or specific user project
value = "#{rebalanced_container_type}/#{rebalanced_container_id}"
multi.sadd(concurrent_running_rebalances_key, value)
multi.expire(concurrent_running_rebalances_key, REDIS_EXPIRY_TIME)
multi.sadd(CONCURRENT_RUNNING_REBALANCES_KEY, value)
multi.expire(CONCURRENT_RUNNING_REBALANCES_KEY, REDIS_EXPIRY_TIME)
end
end
end
def concurrent_running_rebalances_count
with_redis { |redis| redis.scard(concurrent_running_rebalances_key).to_i }
with_redis { |redis| redis.scard(CONCURRENT_RUNNING_REBALANCES_KEY).to_i }
end
def rebalance_in_progress?
all_rebalanced_containers = with_redis { |redis| redis.smembers(concurrent_running_rebalances_key) }
is_running = case rebalanced_container_type
when NAMESPACE
namespace_ids = all_rebalanced_containers.map {|string| string.split("#{NAMESPACE}/").second.to_i }.compact
namespace_ids = self.class.current_rebalancing_containers.map {|string| string.split("#{NAMESPACE}/").second.to_i }.compact
namespace_ids.include?(root_namespace.id)
when PROJECT
project_ids = all_rebalanced_containers.map {|string| string.split("#{PROJECT}/").second.to_i }.compact
project_ids = self.class.current_rebalancing_containers.map {|string| string.split("#{PROJECT}/").second.to_i }.compact
project_ids.include?(projects.take.id) # rubocop:disable CodeReuse/ActiveRecord
else
false
@@ -101,36 +103,63 @@ module Gitlab
multi.expire(issue_ids_key, REDIS_EXPIRY_TIME)
multi.expire(current_index_key, REDIS_EXPIRY_TIME)
multi.expire(current_project_key, REDIS_EXPIRY_TIME)
multi.expire(concurrent_running_rebalances_key, REDIS_EXPIRY_TIME)
multi.expire(CONCURRENT_RUNNING_REBALANCES_KEY, REDIS_EXPIRY_TIME)
end
end
end
def cleanup_cache
value = "#{rebalanced_container_type}/#{rebalanced_container_id}"
with_redis do |redis|
redis.multi do |multi|
multi.del(issue_ids_key)
multi.del(current_index_key)
multi.del(current_project_key)
multi.srem(concurrent_running_rebalances_key, "#{rebalanced_container_type}/#{rebalanced_container_id}")
multi.srem(CONCURRENT_RUNNING_REBALANCES_KEY, value)
multi.set(self.class.recently_finished_key(rebalanced_container_type, rebalanced_container_id), true, ex: 1.hour)
end
end
end
def self.rebalance_recently_finished?(project_id, namespace_id)
container_id = project_id || namespace_id
container_type = project_id.present? ? PROJECT : NAMESPACE
Gitlab::Redis::SharedState.with { |redis| redis.get(recently_finished_key(container_type, container_id)) }
end
def self.fetch_rebalancing_groups_and_projects
namespace_ids = []
project_ids = []
current_rebalancing_containers.each do |string|
container_type, container_id = string.split('/', 2).map(&:to_i)
if container_type == NAMESPACE
namespace_ids << container_id
elsif container_type == PROJECT
project_ids << container_id
end
end
[namespace_ids, project_ids]
end
private
def self.current_rebalancing_containers
Gitlab::Redis::SharedState.with { |redis| redis.smembers(CONCURRENT_RUNNING_REBALANCES_KEY) }
end
attr_accessor :root_namespace, :projects, :rebalanced_container_type, :rebalanced_container_id
def too_many_rebalances_running?
concurrent_running_rebalances_count <= MAX_NUMBER_OF_CONCURRENT_REBALANCES
end
def redis_key_prefix
"gitlab:issues-position-rebalances"
end
def issue_ids_key
"#{redis_key_prefix}:#{root_namespace.id}"
"#{REDIS_KEY_PREFIX}:#{root_namespace.id}"
end
def current_index_key
@@ -141,8 +170,8 @@ module Gitlab
"#{issue_ids_key}:current_project_id"
end
def concurrent_running_rebalances_key
"#{redis_key_prefix}:running_rebalances"
def self.recently_finished_key(container_type, container_id)
"#{RECENTLY_FINISHED_REBALANCE_PREFIX}:#{container_type}:#{container_id}"
end
def with_redis(&blk)

View File

@@ -54,9 +54,12 @@ module Gitlab
strong_memoize(:expiry) do
next duplicate_job.duplicate_key_ttl unless duplicate_job.scheduled?
time_diff = duplicate_job.scheduled_at.to_i - Time.now.to_i
time_diff = [
duplicate_job.scheduled_at.to_i - Time.now.to_i,
0
].max
time_diff > 0 ? time_diff : duplicate_job.duplicate_key_ttl
time_diff + duplicate_job.duplicate_key_ttl
end
end
end
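In effect, a scheduled job's deduplication key now outlives its run time by the full TTL instead of expiring the moment the job becomes due. A worked sketch with an assumed 6-hour TTL:

```ruby
duplicate_key_ttl = 6 * 60 * 60 # assumed default TTL (6 hours)
time_until_run    = 3 * 60 * 60 # job scheduled 3 hours from now

time_diff = [time_until_run, 0].max # => 10800
time_diff + duplicate_key_ttl       # => 32400 — the key lives 9 hours

# The old expression (`time_diff > 0 ? time_diff : duplicate_key_ttl`) expired
# the key after 10800s, exactly when the scheduled job became due to run.
```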

View File

@@ -24512,6 +24512,9 @@ msgstr ""
msgid "PackageRegistry|Package Registry"
msgstr ""
msgid "PackageRegistry|Package deleted successfully"
msgstr ""
msgid "PackageRegistry|Package file deleted successfully"
msgstr ""
@@ -37687,6 +37690,12 @@ msgstr ""
msgid "ValueStreamAnalytics|Average number of deployments to production per day."
msgstr ""
msgid "ValueStreamAnalytics|Dashboard"
msgstr ""
msgid "ValueStreamAnalytics|Go to docs"
msgstr ""
msgid "ValueStreamAnalytics|Items in Value Stream Analytics are currently filtered by their creation time. There is an %{epic_link_start}epic%{epic_link_end} that will change the Value Stream Analytics date filter to use the end event time for the selected stage."
msgstr ""

View File

@@ -463,12 +463,25 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
context 'when job has trace' do
context 'when job has live trace' do
let(:job) { create(:ci_build, :running, :trace_live, pipeline: pipeline) }
it "has_trace is true" do
it 'has_trace is true' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['has_trace']).to be true
end
end
context 'when has live trace and unarchived artifact' do
let(:job) { create(:ci_build, :running, :trace_live, :unarchived_trace_artifact, pipeline: pipeline) }
it 'has_trace is true' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['has_trace']).to be true
end
@@ -631,15 +644,25 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
context 'when job has a trace' do
context 'when job has a live trace' do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/build_trace')
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
expect(json_response['lines']).to eq [{ 'content' => [{ 'text' => 'BUILD TRACE' }], 'offset' => 0 }]
shared_examples_for 'returns trace' do
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/build_trace')
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
expect(json_response['lines']).to match_array [{ 'content' => [{ 'text' => 'BUILD TRACE' }], 'offset' => 0 }]
end
end
it_behaves_like 'returns trace'
context 'when job has unarchived artifact' do
let(:job) { create(:ci_build, :trace_live, :unarchived_trace_artifact, pipeline: pipeline) }
it_behaves_like 'returns trace'
end
context 'when job is running' do
@@ -1055,9 +1078,7 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
post_erase
end
context 'when job is erasable' do
let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline) }
shared_examples_for 'erases' do
it 'redirects to the erased job page' do
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_job_path(id: job.id))
@@ -1073,7 +1094,19 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
context 'when job is not erasable' do
context 'when job is successful and has artifacts' do
let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline) }
it_behaves_like 'erases'
end
context 'when job has live trace and unarchived artifact' do
let(:job) { create(:ci_build, :success, :trace_live, :unarchived_trace_artifact, pipeline: pipeline) }
it_behaves_like 'erases'
end
context 'when job is erased' do
let(:job) { create(:ci_build, :erased, pipeline: pipeline) }
it 'returns unprocessable_entity' do
@@ -1165,16 +1198,26 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
context "when job has a trace file" do
context 'when job has a live trace' do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'sends a trace file' do
response = subject
shared_examples_for 'sends live trace' do
it 'sends a trace file' do
response = subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers["Content-Type"]).to eq("text/plain; charset=utf-8")
expect(response.headers["Content-Disposition"]).to match(/^inline/)
expect(response.body).to eq("BUILD TRACE")
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers["Content-Type"]).to eq("text/plain; charset=utf-8")
expect(response.headers["Content-Disposition"]).to match(/^inline/)
expect(response.body).to eq("BUILD TRACE")
end
end
it_behaves_like 'sends live trace'
context 'and when job has unarchived artifact' do
let(:job) { create(:ci_build, :trace_live, :unarchived_trace_artifact, pipeline: pipeline) }
it_behaves_like 'sends live trace'
end
end

View File

@@ -282,6 +282,12 @@ FactoryBot.define do
end
end
trait :unarchived_trace_artifact do
after(:create) do |build, evaluator|
create(:ci_job_artifact, :unarchived_trace_artifact, job: build)
end
end
trait :trace_with_duplicate_sections do
after(:create) do |build, evaluator|
trace = File.binread(

View File

@@ -87,6 +87,17 @@ FactoryBot.define do
end
end
trait :unarchived_trace_artifact do
file_type { :trace }
file_format { :raw }
after(:build) do |artifact, evaluator|
file = double('file', path: '/path/to/job.log')
artifact.file = file
allow(artifact.file).to receive(:file).and_return(CarrierWave::SanitizedFile.new(file))
end
end
trait :junit do
file_type { :junit }
file_format { :gzip }

View File

@@ -213,6 +213,14 @@ FactoryBot.define do
end
end
trait :with_persisted_artifacts do
status { :success }
after(:create) do |pipeline, evaluator|
pipeline.builds << create(:ci_build, :artifacts, pipeline: pipeline, project: pipeline.project)
end
end
trait :with_job do
after(:build) do |pipeline, evaluator|
pipeline.builds << build(:ci_build, pipeline: pipeline, project: pipeline.project)

View File

@@ -36,8 +36,18 @@ RSpec.describe 'User browses a job', :js do
expect(page).to have_content('Job has been erased')
end
context 'with a failed job' do
let!(:build) { create(:ci_build, :failed, :trace_artifact, pipeline: pipeline) }
context 'with unarchived trace artifact' do
let!(:build) { create(:ci_build, :success, :unarchived_trace_artifact, :coverage, pipeline: pipeline) }
it 'shows no trace message', :js do
wait_for_requests
expect(page).to have_content('This job does not have a trace.')
end
end
context 'with a failed job and live trace' do
let!(:build) { create(:ci_build, :failed, :trace_live, pipeline: pipeline) }
it 'displays the failure reason' do
wait_for_all_requests
@@ -46,6 +56,18 @@ RSpec.describe 'User browses a job', :js do
".build-job > a[title='test - failed - (unknown failure)']")
end
end
context 'with unarchived trace artifact' do
let!(:artifact) { create(:ci_job_artifact, :unarchived_trace_artifact, job: build) }
it 'displays the failure reason from the live trace' do
wait_for_all_requests
within('.builds-container') do
expect(page).to have_selector(
".build-job > a[title='test - failed - (unknown failure)']")
end
end
end
end
context 'when a failed job has been retried' do

View File

@@ -14,6 +14,9 @@
},
"unit": {
"type": "string"
},
"links": {
"type": "array"
}
},
"additionalProperties": false

View File

@@ -0,0 +1,102 @@
import { GlLink, GlIcon } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import MetricPopover from '~/cycle_analytics/components/metric_popover.vue';
const MOCK_METRIC = {
key: 'deployment-frequency',
label: 'Deployment Frequency',
value: '10.0',
unit: 'per day',
description: 'Average number of deployments to production per day.',
links: [],
};
describe('MetricPopover', () => {
let wrapper;
const createComponent = (props = {}) => {
return shallowMountExtended(MetricPopover, {
propsData: {
target: 'deployment-frequency',
...props,
},
stubs: {
'gl-popover': { template: '<div><slot name="title"></slot><slot></slot></div>' },
},
});
};
const findMetricLabel = () => wrapper.findByTestId('metric-label');
const findAllMetricLinks = () => wrapper.findAll('[data-testid="metric-link"]');
const findMetricDescription = () => wrapper.findByTestId('metric-description');
const findMetricDocsLink = () => wrapper.findByTestId('metric-docs-link');
const findMetricDocsLinkIcon = () => findMetricDocsLink().find(GlIcon);
afterEach(() => {
wrapper.destroy();
});
it('renders the metric label', () => {
wrapper = createComponent({ metric: MOCK_METRIC });
expect(findMetricLabel().text()).toBe(MOCK_METRIC.label);
});
it('renders the metric description', () => {
wrapper = createComponent({ metric: MOCK_METRIC });
expect(findMetricDescription().text()).toBe(MOCK_METRIC.description);
});
describe('with links', () => {
const links = [
{
name: 'Deployment frequency',
url: '/groups/gitlab-org/-/analytics/ci_cd?tab=deployment-frequency',
label: 'Dashboard',
},
{
name: 'Another link',
url: '/groups/gitlab-org/-/analytics/another-link',
label: 'Another link',
},
];
const docsLink = {
name: 'Deployment frequency',
url: '/help/user/analytics/index#definitions',
label: 'Go to docs',
docs_link: true,
};
const linksWithDocs = [...links, docsLink];
describe.each`
hasDocsLink | allLinks | displayedMetricLinks
${true} | ${linksWithDocs} | ${links}
${false} | ${links} | ${links}
`(
'when one link has docs_link=$hasDocsLink',
({ hasDocsLink, allLinks, displayedMetricLinks }) => {
beforeEach(() => {
wrapper = createComponent({ metric: { ...MOCK_METRIC, links: allLinks } });
});
displayedMetricLinks.forEach((link, idx) => {
it(`renders a link for "${link.name}"`, () => {
const allLinkContainers = findAllMetricLinks();
expect(allLinkContainers.at(idx).text()).toContain(link.name);
expect(allLinkContainers.at(idx).find(GlLink).attributes('href')).toBe(link.url);
});
});
it(`${hasDocsLink ? 'renders' : "doesn't render"} a docs link`, () => {
expect(findMetricDocsLink().exists()).toBe(hasDocsLink);
if (hasDocsLink) {
expect(findMetricDocsLink().attributes('href')).toBe(docsLink.url);
expect(findMetricDocsLink().text()).toBe(docsLink.label);
expect(findMetricDocsLinkIcon().attributes('name')).toBe('external-link');
}
});
},
);
});
});

View File

@ -6,9 +6,11 @@ import waitForPromises from 'helpers/wait_for_promises';
import { METRIC_TYPE_SUMMARY } from '~/api/analytics_api';
import ValueStreamMetrics from '~/cycle_analytics/components/value_stream_metrics.vue';
import createFlash from '~/flash';
import { redirectTo } from '~/lib/utils/url_utility';
import { group } from './mock_data';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility');
describe('ValueStreamMetrics', () => {
let wrapper;
@ -68,19 +70,30 @@ describe('ValueStreamMetrics', () => {
expectToHaveRequest({ params: {} });
});
it.each`
index | value | title | unit
${0} | ${metricsData[0].value} | ${metricsData[0].title} | ${metricsData[0].unit}
${1} | ${metricsData[1].value} | ${metricsData[1].title} | ${metricsData[1].unit}
${2} | ${metricsData[2].value} | ${metricsData[2].title} | ${metricsData[2].unit}
${3} | ${metricsData[3].value} | ${metricsData[3].title} | ${metricsData[3].unit}
`(
'renders a single stat component for the $title with value and unit',
({ index, value, title, unit }) => {
describe.each`
index | value | title | unit | clickable
${0} | ${metricsData[0].value} | ${metricsData[0].title} | ${metricsData[0].unit} | ${false}
${1} | ${metricsData[1].value} | ${metricsData[1].title} | ${metricsData[1].unit} | ${false}
${2} | ${metricsData[2].value} | ${metricsData[2].title} | ${metricsData[2].unit} | ${false}
${3} | ${metricsData[3].value} | ${metricsData[3].title} | ${metricsData[3].unit} | ${true}
`('metric tiles', ({ index, value, title, unit, clickable }) => {
it(`renders a single stat component for "${title}" with value and unit`, () => {
const metric = findMetrics().at(index);
expect(metric.props()).toMatchObject({ value, title, unit: unit ?? '' });
},
);
});
it(`${
clickable ? 'redirects' : "doesn't redirect"
} when the user clicks the "${title}" metric`, () => {
const metric = findMetrics().at(index);
metric.vm.$emit('click');
if (clickable) {
expect(redirectTo).toHaveBeenCalledWith(metricsData[index].links[0].url);
} else {
expect(redirectTo).not.toHaveBeenCalled();
}
});
});
it('will not display a loading icon', () => {
expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);

View File

@ -16,16 +16,15 @@ import PackageFiles from '~/packages_and_registries/package_registry/components/
import PackageHistory from '~/packages_and_registries/package_registry/components/details/package_history.vue';
import PackageTitle from '~/packages_and_registries/package_registry/components/details/package_title.vue';
import VersionRow from '~/packages_and_registries/package_registry/components/details/version_row.vue';
import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue';
import {
FETCH_PACKAGE_DETAILS_ERROR_MESSAGE,
DELETE_PACKAGE_ERROR_MESSAGE,
PACKAGE_TYPE_COMPOSER,
DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
DELETE_PACKAGE_FILE_ERROR_MESSAGE,
PACKAGE_TYPE_NUGET,
} from '~/packages_and_registries/package_registry/constants';
import destroyPackageMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package.mutation.graphql';
import destroyPackageFileMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_file.mutation.graphql';
import getPackageDetails from '~/packages_and_registries/package_registry/graphql/queries/get_package_details.query.graphql';
import {
@ -34,8 +33,6 @@ import {
packageVersions,
dependencyLinks,
emptyPackageDetailsQuery,
packageDestroyMutation,
packageDestroyMutationError,
packageFiles,
packageDestroyFileMutation,
packageDestroyFileMutationError,
@ -64,14 +61,12 @@ describe('PackagesApp', () => {
function createComponent({
resolver = jest.fn().mockResolvedValue(packageDetailsQuery()),
mutationResolver = jest.fn().mockResolvedValue(packageDestroyMutation()),
fileDeleteMutationResolver = jest.fn().mockResolvedValue(packageDestroyFileMutation()),
} = {}) {
localVue.use(VueApollo);
const requestHandlers = [
[getPackageDetails, resolver],
[destroyPackageMutation, mutationResolver],
[destroyPackageFileMutation, fileDeleteMutationResolver],
];
apolloProvider = createMockApollo(requestHandlers);
@ -82,6 +77,7 @@ describe('PackagesApp', () => {
provide,
stubs: {
PackageTitle,
DeletePackage,
GlModal: {
template: '<div></div>',
methods: {
@ -108,6 +104,7 @@ describe('PackagesApp', () => {
const findDependenciesCountBadge = () => wrapper.findComponent(GlBadge);
const findNoDependenciesMessage = () => wrapper.findByTestId('no-dependencies-message');
const findDependencyRows = () => wrapper.findAllComponents(DependencyRow);
const findDeletePackage = () => wrapper.findComponent(DeletePackage);
afterEach(() => {
wrapper.destroy();
@ -187,14 +184,6 @@ describe('PackagesApp', () => {
});
};
const performDeletePackage = async () => {
await findDeleteButton().trigger('click');
findDeleteModal().vm.$emit('primary');
await waitForPromises();
};
afterEach(() => {
Object.defineProperty(document, 'referrer', {
value: originalReferrer,
@ -220,7 +209,7 @@ describe('PackagesApp', () => {
await waitForPromises();
await performDeletePackage();
findDeletePackage().vm.$emit('end');
expect(window.location.replace).toHaveBeenCalledWith(
'projectListUrl?showSuccessDeleteAlert=true',
@ -234,45 +223,13 @@ describe('PackagesApp', () => {
await waitForPromises();
await performDeletePackage();
findDeletePackage().vm.$emit('end');
expect(window.location.replace).toHaveBeenCalledWith(
'groupListUrl?showSuccessDeleteAlert=true',
);
});
});
describe('request failure', () => {
it('on global failure it displays an alert', async () => {
createComponent({ mutationResolver: jest.fn().mockRejectedValue() });
await waitForPromises();
await performDeletePackage();
expect(createFlash).toHaveBeenCalledWith(
expect.objectContaining({
message: DELETE_PACKAGE_ERROR_MESSAGE,
}),
);
});
it('on payload with error it displays an alert', async () => {
createComponent({
mutationResolver: jest.fn().mockResolvedValue(packageDestroyMutationError()),
});
await waitForPromises();
await performDeletePackage();
expect(createFlash).toHaveBeenCalledWith(
expect.objectContaining({
message: DELETE_PACKAGE_ERROR_MESSAGE,
}),
);
});
});
});
describe('package files', () => {

View File

@ -0,0 +1,160 @@
import { createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import createFlash from '~/flash';
import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue';
import destroyPackageMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package.mutation.graphql';
import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql';
import {
packageDestroyMutation,
packageDestroyMutationError,
packagesListQuery,
} from '../../mock_data';
jest.mock('~/flash');
const localVue = createLocalVue();
describe('DeletePackage', () => {
let wrapper;
let apolloProvider;
let resolver;
let mutationResolver;
const eventPayload = { id: '1' };
function createComponent(propsData = {}) {
localVue.use(VueApollo);
const requestHandlers = [
[getPackagesQuery, resolver],
[destroyPackageMutation, mutationResolver],
];
apolloProvider = createMockApollo(requestHandlers);
wrapper = shallowMountExtended(DeletePackage, {
propsData,
localVue,
apolloProvider,
scopedSlots: {
default(props) {
return this.$createElement('button', {
attrs: {
'data-testid': 'trigger-button',
},
on: {
click: props.deletePackage,
},
});
},
},
});
}
const findButton = () => wrapper.findByTestId('trigger-button');
const clickOnButtonAndWait = (payload) => {
findButton().trigger('click', payload);
return waitForPromises();
};
beforeEach(() => {
resolver = jest.fn().mockResolvedValue(packagesListQuery());
mutationResolver = jest.fn().mockResolvedValue(packageDestroyMutation());
});
afterEach(() => {
wrapper.destroy();
});
it('binds deletePackage method to the default slot', () => {
createComponent();
findButton().trigger('click');
expect(wrapper.emitted('start')).toEqual([[]]);
});
it('calls apollo mutation', async () => {
createComponent();
await clickOnButtonAndWait(eventPayload);
expect(mutationResolver).toHaveBeenCalledWith(eventPayload);
});
it('passes refetchQueries to apollo mutate', async () => {
const variables = { isGroupPage: true };
createComponent({
refetchQueries: [{ query: getPackagesQuery, variables }],
});
await clickOnButtonAndWait(eventPayload);
expect(mutationResolver).toHaveBeenCalledWith(eventPayload);
expect(resolver).toHaveBeenCalledWith(variables);
});
describe('on mutation success', () => {
it('emits end event', async () => {
createComponent();
await clickOnButtonAndWait(eventPayload);
expect(wrapper.emitted('end')).toEqual([[]]);
});
it('does not call createFlash', async () => {
createComponent();
await clickOnButtonAndWait(eventPayload);
expect(createFlash).not.toHaveBeenCalled();
});
it('calls createFlash with the success message when showSuccessAlert is true', async () => {
createComponent({ showSuccessAlert: true });
await clickOnButtonAndWait(eventPayload);
expect(createFlash).toHaveBeenCalledWith({
message: DeletePackage.i18n.successMessage,
type: 'success',
});
});
});
describe.each`
errorType | mutationResolverResponse
${'connectionError'} | ${jest.fn().mockRejectedValue()}
${'localError'} | ${jest.fn().mockResolvedValue(packageDestroyMutationError())}
`('on mutation $errorType', ({ mutationResolverResponse }) => {
beforeEach(() => {
mutationResolver = mutationResolverResponse;
});
it('emits end event', async () => {
createComponent();
await clickOnButtonAndWait(eventPayload);
expect(wrapper.emitted('end')).toEqual([[]]);
});
it('calls createFlash with the error message', async () => {
createComponent({ showSuccessAlert: true });
await clickOnButtonAndWait(eventPayload);
expect(createFlash).toHaveBeenCalledWith({
message: DeletePackage.i18n.errorMessage,
type: 'warning',
captureError: true,
error: expect.any(Error),
});
});
});
});

View File

@ -10,6 +10,7 @@ import PackageListApp from '~/packages_and_registries/package_registry/component
import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue';
import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue';
import OriginalPackageList from '~/packages_and_registries/package_registry/components/list/packages_list.vue';
import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue';
import {
PROJECT_RESOURCE_TYPE,
@ -55,6 +56,7 @@ describe('PackagesListApp', () => {
const findSearch = () => wrapper.findComponent(PackageSearch);
const findListComponent = () => wrapper.findComponent(PackageList);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findDeletePackage = () => wrapper.findComponent(DeletePackage);
const mountComponent = ({
resolver = jest.fn().mockResolvedValue(packagesListQuery()),
@ -72,9 +74,10 @@ describe('PackagesListApp', () => {
stubs: {
GlEmptyState,
GlLoadingIcon,
PackageList,
GlSprintf,
GlLink,
PackageList,
DeletePackage,
},
});
};
@ -228,4 +231,45 @@ describe('PackagesListApp', () => {
expect(findEmptyState().text()).toContain(PackageListApp.i18n.widenFilters);
});
});
describe('delete package', () => {
it('exists and has the correct props', async () => {
mountComponent();
await waitForDebouncedApollo();
expect(findDeletePackage().props()).toMatchObject({
refetchQueries: [{ query: getPackagesQuery, variables: {} }],
showSuccessAlert: true,
});
});
it('deletePackage is bound to package-list package:delete event', async () => {
mountComponent();
await waitForDebouncedApollo();
findListComponent().vm.$emit('package:delete', { id: 1 });
expect(findDeletePackage().emitted('start')).toEqual([[]]);
});
it('start and end events set loading correctly', async () => {
mountComponent();
await waitForDebouncedApollo();
findDeletePackage().vm.$emit('start');
await nextTick();
expect(findListComponent().props('isLoading')).toBe(true);
findDeletePackage().vm.$emit('end');
await nextTick();
expect(findListComponent().props('isLoading')).toBe(false);
});
});
});

View File

@ -122,14 +122,6 @@ describe('packages_list', () => {
expect(findPackageListDeleteModal().text()).toContain(firstPackage.name);
});
it('confirming delete empties itemsToBeDeleted', async () => {
findPackageListDeleteModal().vm.$emit('ok');
await nextTick();
expect(findPackageListDeleteModal().text()).not.toContain(firstPackage.name);
});
it('confirming on the modal emits package:delete', async () => {
findPackageListDeleteModal().vm.$emit('ok');
@ -138,8 +130,9 @@ describe('packages_list', () => {
expect(wrapper.emitted('package:delete')[0]).toEqual([firstPackage]);
});
it('cancel event resets itemToBeDeleted', async () => {
findPackageListDeleteModal().vm.$emit('cancel');
it('closing the modal resets itemToBeDeleted', async () => {
// triggering the v-model
findPackageListDeleteModal().vm.$emit('input', false);
await nextTick();

View File

@ -3,99 +3,134 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Trace::Archive do
let_it_be(:job) { create(:ci_build, :success, :trace_live) }
let_it_be_with_reload(:trace_metadata) { create(:ci_build_trace_metadata, build: job) }
let_it_be(:src_checksum) do
job.trace.read { |stream| Digest::MD5.hexdigest(stream.raw) }
end
let(:metrics) { spy('metrics') }
describe '#execute' do
subject { described_class.new(job, trace_metadata, metrics) }
it 'computes and assigns checksum' do
Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream|
expect { subject.execute!(stream) }.to change { Ci::JobArtifact.count }.by(1)
end
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.trace_artifact).to eq(job.job_artifacts_trace)
context 'with transactional fixtures' do
let_it_be(:job) { create(:ci_build, :success, :trace_live) }
let_it_be_with_reload(:trace_metadata) { create(:ci_build_trace_metadata, build: job) }
let_it_be(:src_checksum) do
job.trace.read { |stream| Digest::MD5.hexdigest(stream.raw) }
end
context 'validating artifact checksum' do
let(:trace) { 'abc' }
let(:stream) { StringIO.new(trace, 'rb') }
let(:src_checksum) { Digest::MD5.hexdigest(trace) }
let(:metrics) { spy('metrics') }
context 'when the object store is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
describe '#execute' do
subject { described_class.new(job, trace_metadata, metrics) }
it 'computes and assigns checksum' do
Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream|
expect { subject.execute!(stream) }.to change { Ci::JobArtifact.count }.by(1)
end
it 'skips validation' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.trace_artifact).to eq(job.job_artifacts_trace)
end
context 'with background_upload enabled' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it 'skips validation' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
end
context 'with direct_upload enabled' do
before do
stub_artifacts_object_storage(direct_upload: true)
end
it 'validates the archived trace' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to eq(src_checksum)
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
context 'when the checksum does not match' do
let(:invalid_remote_checksum) { SecureRandom.hex }
context 'validating artifact checksum' do
let(:trace) { 'abc' }
let(:stream) { StringIO.new(trace, 'rb') }
let(:src_checksum) { Digest::MD5.hexdigest(trace) }
context 'when the object store is disabled' do
before do
expect(::Gitlab::Ci::Trace::RemoteChecksum)
.to receive(:new)
.with(an_instance_of(Ci::JobArtifact))
.and_return(double(md5_checksum: invalid_remote_checksum))
stub_artifacts_object_storage(enabled: false)
end
it 'skips validation' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
end
context 'with background_upload enabled' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it 'skips validation' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
end
context 'with direct_upload enabled' do
before do
stub_artifacts_object_storage(direct_upload: true)
end
it 'validates the archived trace' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to eq(invalid_remote_checksum)
expect(trace_metadata.remote_checksum).to eq(src_checksum)
expect(metrics)
.to have_received(:increment_error_counter)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
context 'when the checksum does not match' do
let(:invalid_remote_checksum) { SecureRandom.hex }
before do
expect(::Gitlab::Ci::Trace::RemoteChecksum)
.to receive(:new)
.with(an_instance_of(Ci::JobArtifact))
.and_return(double(md5_checksum: invalid_remote_checksum))
end
it 'validates the archived trace' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to eq(invalid_remote_checksum)
expect(metrics)
.to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
end
end
end
end
end
context 'without transactional fixtures', :delete do
let(:job) { create(:ci_build, :success, :trace_live) }
let(:trace_metadata) { create(:ci_build_trace_metadata, build: job) }
let(:stream) { StringIO.new('abc', 'rb') }
describe '#execute!' do
subject(:execute) do
::Gitlab::Ci::Trace::Archive.new(job, trace_metadata).execute!(stream)
end
before do
stub_artifacts_object_storage(direct_upload: true)
end
it 'does not upload the trace inside a database transaction', :delete do
expect(Ci::ApplicationRecord.connection.transaction_open?).to be_falsey
allow_next_instance_of(Ci::JobArtifact) do |artifact|
artifact.job_id = job.id
expect(artifact)
.to receive(:store_file!)
.and_wrap_original do |store_method, *args|
expect(Ci::ApplicationRecord.connection.transaction_open?).to be_falsey
store_method.call(*args)
end
end
execute
end
end
end
end

View File

@ -25,16 +25,6 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_defa
artifact1.file.migrate!(ObjectStorage::Store::REMOTE)
end
it 'reloads the trace after it is migrated' do
stub_const('Gitlab::HttpIO::BUFFER_SIZE', test_data.length)
expect_next_instance_of(Gitlab::HttpIO) do |http_io|
expect(http_io).to receive(:get_chunk).and_return(test_data, "")
end
expect(artifact2.job.trace.raw).to eq(test_data)
end
it 'reloads the trace in case of a chunk error' do
chunk_error = described_class::ChunkedIO::FailedToGetChunkError

View File

@ -94,7 +94,7 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st
context 'when tracking new rebalance' do
it 'returns as expired for non existent key' do
::Gitlab::Redis::SharedState.with do |redis|
expect(redis.ttl(rebalance_caching.send(:concurrent_running_rebalances_key))).to be < 0
expect(redis.ttl(Gitlab::Issues::Rebalancing::State::CONCURRENT_RUNNING_REBALANCES_KEY)).to be < 0
end
end
@ -102,7 +102,7 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st
rebalance_caching.track_new_running_rebalance
::Gitlab::Redis::SharedState.with do |redis|
expect(redis.ttl(rebalance_caching.send(:concurrent_running_rebalances_key))).to be_between(0, described_class::REDIS_EXPIRY_TIME.ago.to_i)
expect(redis.ttl(Gitlab::Issues::Rebalancing::State::CONCURRENT_RUNNING_REBALANCES_KEY)).to be_between(0, described_class::REDIS_EXPIRY_TIME.ago.to_i)
end
end
end
@ -169,7 +169,7 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st
rebalance_caching.cleanup_cache
expect(check_existing_keys).to eq(0)
expect(check_existing_keys).to eq(1)
end
end
end
@ -183,6 +183,16 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st
it { expect(rebalance_caching.send(:rebalanced_container_type)).to eq(described_class::NAMESPACE) }
it_behaves_like 'issues rebalance caching'
describe '.fetch_rebalancing_groups_and_projects' do
before do
rebalance_caching.track_new_running_rebalance
end
it 'caches recently finished rebalance key' do
expect(described_class.fetch_rebalancing_groups_and_projects).to eq([[group.id], []])
end
end
end
context 'rebalancing issues in a project' do
@ -193,6 +203,16 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st
it { expect(rebalance_caching.send(:rebalanced_container_type)).to eq(described_class::PROJECT) }
it_behaves_like 'issues rebalance caching'
describe '.fetch_rebalancing_groups_and_projects' do
before do
rebalance_caching.track_new_running_rebalance
end
it 'caches recently finished rebalance key' do
expect(described_class.fetch_rebalancing_groups_and_projects).to eq([[], [project.id]])
end
end
end
# count - how many issue ids to generate, issue ids will start at 1
@ -212,11 +232,14 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st
def check_existing_keys
index = 0
# spec only, we do not actually scan keys in the code
recently_finished_keys_count = Gitlab::Redis::SharedState.with { |redis| redis.scan(0, match: "#{described_class::RECENTLY_FINISHED_REBALANCE_PREFIX}:*") }.last.count
index += 1 if rebalance_caching.get_current_index > 0
index += 1 if rebalance_caching.get_current_project_id.present?
index += 1 if rebalance_caching.get_cached_issue_ids(0, 100).present?
index += 1 if rebalance_caching.rebalance_in_progress?
index += 1 if recently_finished_keys_count > 0
index
end
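The scan above matches keys written by the private recently_finished_key helper, which the IssueRebalancingWorker spec later in this commit also calls directly. A plausible sketch of that key layout, assuming it simply joins the prefix shown above with the container type and id (the real helper may compose it differently):

# Hypothetical key composition, inferred from the scan pattern above.
def recently_finished_key(container_type, container_id)
  "#{RECENTLY_FINISHED_REBALANCE_PREFIX}:#{container_type}:#{container_id}"
end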

View File

@ -351,6 +351,21 @@ RSpec.describe Ci::JobArtifact do
end
end
context 'when updating any field except the file' do
let(:artifact) { create(:ci_job_artifact, :unarchived_trace_artifact, file_store: 2) }
before do
stub_artifacts_object_storage(direct_upload: true)
artifact.file.object_store = 1
end
it 'the `after_commit` hook does not update `file_store`' do
artifact.update!(expire_at: Time.current)
expect(artifact.file_store).to be(2)
end
end
describe 'validates file format' do
subject { artifact }
@ -507,6 +522,53 @@ RSpec.describe Ci::JobArtifact do
end
end
describe '#store_after_commit?' do
let(:file_type) { :archive }
let(:artifact) { build(:ci_job_artifact, file_type) }
context 'when direct upload is enabled' do
before do
stub_artifacts_object_storage(direct_upload: true)
end
context 'when the artifact is a trace' do
let(:file_type) { :trace }
context 'when ci_store_trace_outside_transaction is enabled' do
it 'returns true' do
expect(artifact.store_after_commit?).to be_truthy
end
end
context 'when ci_store_trace_outside_transaction is disabled' do
before do
stub_feature_flags(ci_store_trace_outside_transaction: false)
end
it 'returns false' do
expect(artifact.store_after_commit?).to be_falsey
end
end
end
context 'when the artifact is not a trace' do
it 'returns false' do
expect(artifact.store_after_commit?).to be_falsey
end
end
end
context 'when direct upload is disabled' do
before do
stub_artifacts_object_storage(direct_upload: false)
end
it 'returns false' do
expect(artifact.store_after_commit?).to be_falsey
end
end
end
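Read together, these examples fully pin down the predicate: it is true only for trace artifacts when direct upload is on and ci_store_trace_outside_transaction is enabled. A sketch consistent with them (a hypothetical reconstruction, not the shipped method; the direct-upload check below is a stand-in name):

# Hypothetical reconstruction from the examples above.
def store_after_commit?
  trace? && # file_type enum predicate
    JobArtifactUploader.direct_upload_enabled? && # stand-in for the uploader's direct-upload check
    Feature.enabled?(:ci_store_trace_outside_transaction)
end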
describe 'file is being stored' do
subject { create(:ci_job_artifact, :archive) }

View File

@ -308,6 +308,7 @@ RSpec.describe API::Ci::Jobs do
it 'returns no artifacts nor trace data' do
json_job = json_response.first
expect(response).to have_gitlab_http_status(:ok)
expect(json_job['artifacts_file']).to be_nil
expect(json_job['artifacts']).to be_an Array
expect(json_job['artifacts']).to be_empty
@ -426,6 +427,22 @@ RSpec.describe API::Ci::Jobs do
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when trace artifact record exists with no stored file', :skip_before_request do
before do
create(:ci_job_artifact, :unarchived_trace_artifact, job: job, project: job.project)
end
it 'returns no artifacts nor trace data' do
get api("/projects/#{project.id}/jobs/#{job.id}", api_user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['artifacts']).to be_an Array
expect(json_response['artifacts'].size).to eq(1)
expect(json_response['artifacts'][0]['file_type']).to eq('trace')
expect(json_response['artifacts'][0]['filename']).to eq('job.log')
end
end
end
describe 'DELETE /projects/:id/jobs/:job_id/artifacts' do
@ -1024,7 +1041,16 @@ RSpec.describe API::Ci::Jobs do
end
end
context 'when trace is file' do
context 'when live trace and uploadless trace artifact' do
let(:job) { create(:ci_build, :trace_live, :unarchived_trace_artifact, pipeline: pipeline) }
it 'returns specific job trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(job.trace.raw)
end
end
context 'when trace is live' do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'returns specific job trace' do
@ -1032,6 +1058,28 @@ RSpec.describe API::Ci::Jobs do
expect(response.body).to eq(job.trace.raw)
end
end
context 'when no trace' do
let(:job) { create(:ci_build, pipeline: pipeline) }
it 'returns empty trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to be_empty
end
end
context 'when trace artifact record exists with no stored file' do
let(:job) { create(:ci_build, pipeline: pipeline) }
before do
create(:ci_job_artifact, :unarchived_trace_artifact, job: job, project: job.project)
end
it 'returns empty trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to be_empty
end
end
end
context 'unauthorized user' do
@ -1143,9 +1191,7 @@ RSpec.describe API::Ci::Jobs do
post api("/projects/#{project.id}/jobs/#{job.id}/erase", user)
end
context 'job is erasable' do
let(:job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, :success, project: project, pipeline: pipeline) }
shared_examples_for 'erases job' do
it 'erases job content' do
expect(response).to have_gitlab_http_status(:created)
expect(job.job_artifacts.count).to eq(0)
@ -1154,6 +1200,12 @@ RSpec.describe API::Ci::Jobs do
expect(job.artifacts_metadata.present?).to be_falsy
expect(job.has_job_artifacts?).to be_falsy
end
end
context 'job is erasable' do
let(:job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, :success, project: project, pipeline: pipeline) }
it_behaves_like 'erases job'
it 'updates job' do
job.reload
@ -1163,6 +1215,12 @@ RSpec.describe API::Ci::Jobs do
end
end
context 'when job has an unarchived trace artifact' do
let(:job) { create(:ci_build, :success, :trace_live, :unarchived_trace_artifact, project: project, pipeline: pipeline) }
it_behaves_like 'erases job'
end
context 'job is not erasable' do
let(:job) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) }

View File

@ -272,6 +272,16 @@ RSpec.describe API::Namespaces do
expect(response).to have_gitlab_http_status(:unauthorized)
end
context 'when requesting project_namespace' do
let(:namespace_id) { project_namespace.id }
it 'returns authentication error' do
get api("/namespaces/#{project_namespace.path}/exists"), params: { parent_id: group2.id }
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
context 'when authenticated' do
@ -330,6 +340,18 @@ RSpec.describe API::Namespaces do
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(expected_json)
end
context 'when requesting project_namespace' do
let(:namespace_id) { project_namespace.id }
it 'returns JSON indicating the namespace does not exist without a suggestion' do
get api("/namespaces/#{project_namespace.path}/exists", user), params: { parent_id: group2.id }
expected_json = { exists: false, suggests: [] }.to_json
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(expected_json)
end
end
end
end
end

View File

@ -36,7 +36,7 @@ RSpec.describe AnalyticsSummarySerializer do
context 'when representing with unit' do
let(:resource) do
Gitlab::CycleAnalytics::Summary::DeploymentFrequency
.new(deployments: 10, options: { from: 1.day.ago })
.new(deployments: 10, options: { from: 1.day.ago }, project: project)
end
subject { described_class.new.represent(resource, with_unit: true) }

View File

@ -49,6 +49,7 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.file_type).to eq(params['artifact_type'])
expect(new_artifact.file_format).to eq(params['artifact_format'])
expect(new_artifact.file_sha256).to eq(artifacts_sha256)
expect(new_artifact.locked).to eq(job.pipeline.locked)
end
it 'does not track the job user_id' do
@ -75,6 +76,7 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.file_type).to eq('metadata')
expect(new_artifact.file_format).to eq('gzip')
expect(new_artifact.file_sha256).to eq(artifacts_sha256)
expect(new_artifact.locked).to eq(job.pipeline.locked)
end
it 'sets expiration date according to application settings' do

View File

@ -3,93 +3,247 @@
require 'spec_helper'
RSpec.describe Ci::UnlockArtifactsService do
describe '#execute' do
subject(:execute) { described_class.new(pipeline.project, pipeline.user).execute(ci_ref, before_pipeline) }
using RSpec::Parameterized::TableSyntax
where(:tag, :ci_update_unlocked_job_artifacts) do
false | false
false | true
true | false
true | true
end
with_them do
let(:ref) { 'master' }
let(:ref_path) { tag ? "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" : "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
let(:ci_ref) { create(:ci_ref, ref_path: ref_path) }
let(:project) { ci_ref.project }
let(:source_job) { create(:ci_build, pipeline: pipeline) }
let!(:old_unlocked_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :unlocked) }
let!(:older_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
let!(:older_ambiguous_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: !tag, project: project, locked: :artifacts_locked) }
let!(:pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
let!(:child_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
let!(:newer_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
let!(:other_ref_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: 'other_ref', tag: tag, project: project, locked: :artifacts_locked) }
let!(:sources_pipeline) { create(:ci_sources_pipeline, source_job: source_job, source_project: project, pipeline: child_pipeline, project: project) }
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
stub_feature_flags(ci_update_unlocked_job_artifacts: ci_update_unlocked_job_artifacts)
end
[true, false].each do |tag|
context "when tag is #{tag}" do
let(:ref) { 'master' }
let(:ref_path) { tag ? "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" : "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
let(:ci_ref) { create(:ci_ref, ref_path: ref_path) }
describe '#execute' do
subject(:execute) { described_class.new(pipeline.project, pipeline.user).execute(ci_ref, before_pipeline) }
let!(:old_unlocked_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :unlocked) }
let!(:older_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
let!(:older_ambiguous_pipeline) { create(:ci_pipeline, ref: ref, tag: !tag, project: ci_ref.project, locked: :artifacts_locked) }
let!(:pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
let!(:child_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
let!(:newer_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
let!(:other_ref_pipeline) { create(:ci_pipeline, ref: 'other_ref', tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
context 'when running on a ref before a pipeline' do
let(:before_pipeline) { pipeline }
before do
create(:ci_sources_pipeline,
source_job: create(:ci_build, pipeline: pipeline),
source_project: ci_ref.project,
pipeline: child_pipeline,
project: ci_ref.project)
it 'unlocks artifacts from older pipelines' do
expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
context 'when running on a ref before a pipeline' do
let(:before_pipeline) { pipeline }
it 'unlocks artifacts from older pipelines' do
expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
it 'does not unlock artifacts for tag or branch with same name as ref' do
expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not unlock artifacts from newer pipelines' do
expect { execute }.not_to change { newer_pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not lock artifacts from old unlocked pipelines' do
expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
end
it 'does not unlock artifacts from the same pipeline' do
expect { execute }.not_to change { pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not unlock artifacts for other refs' do
expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not unlock artifacts for child pipeline' do
expect { execute }.not_to change { child_pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not unlock artifacts for tag or branch with same name as ref' do
expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
end
context 'when running on just the ref' do
let(:before_pipeline) { nil }
it 'does not unlock artifacts from newer pipelines' do
expect { execute }.not_to change { newer_pipeline.reload.locked }.from('artifacts_locked')
end
it 'unlocks artifacts from older pipelines' do
expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
it 'does not lock artifacts from old unlocked pipelines' do
expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
end
it 'unlocks artifacts from newer pipelines' do
expect { execute }.to change { newer_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
it 'does not unlock artifacts from the same pipeline' do
expect { execute }.not_to change { pipeline.reload.locked }.from('artifacts_locked')
end
it 'unlocks artifacts from the same pipeline' do
expect { execute }.to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
it 'does not unlock artifacts for other refs' do
expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not unlock artifacts for tag or branch with same name as ref' do
expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not unlock artifacts for child pipeline' do
expect { execute }.not_to change { child_pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not lock artifacts from old unlocked pipelines' do
expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
end
it 'unlocks job artifact records' do
pending unless ci_update_unlocked_job_artifacts
it 'does not unlock artifacts for other refs' do
expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
end
expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(2)
end
end
context 'when running on just the ref' do
let(:before_pipeline) { nil }
it 'unlocks artifacts from older pipelines' do
expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
it 'unlocks artifacts from newer pipelines' do
expect { execute }.to change { newer_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
it 'unlocks artifacts from the same pipeline' do
expect { execute }.to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
it 'does not unlock artifacts for tag or branch with same name as ref' do
expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
end
it 'does not lock artifacts from old unlocked pipelines' do
expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
end
it 'does not unlock artifacts for other refs' do
expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
end
it 'unlocks job artifact records' do
pending unless ci_update_unlocked_job_artifacts
expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(8)
end
end
end
describe '#unlock_pipelines_query' do
subject { described_class.new(pipeline.project, pipeline.user).unlock_pipelines_query(ci_ref, before_pipeline) }
context 'when running on a ref before a pipeline' do
let(:before_pipeline) { pipeline }
it 'produces the expected SQL string' do
expect(subject.squish).to eq <<~SQL.squish
UPDATE
"ci_pipelines"
SET
"locked" = 0
WHERE
"ci_pipelines"."id" IN
(SELECT
"ci_pipelines"."id"
FROM
"ci_pipelines"
WHERE
"ci_pipelines"."ci_ref_id" = #{ci_ref.id}
AND "ci_pipelines"."locked" = 1
AND (ci_pipelines.id < #{before_pipeline.id})
AND "ci_pipelines"."id" NOT IN
(WITH RECURSIVE
"base_and_descendants"
AS
((SELECT
"ci_pipelines".*
FROM
"ci_pipelines"
WHERE
"ci_pipelines"."id" = #{before_pipeline.id})
UNION
(SELECT
"ci_pipelines".*
FROM
"ci_pipelines",
"base_and_descendants",
"ci_sources_pipelines"
WHERE
"ci_sources_pipelines"."pipeline_id" = "ci_pipelines"."id"
AND "ci_sources_pipelines"."source_pipeline_id" = "base_and_descendants"."id"
AND "ci_sources_pipelines"."source_project_id" = "ci_sources_pipelines"."project_id"))
SELECT
"id"
FROM
"base_and_descendants"
AS
"ci_pipelines")
LIMIT 1
FOR UPDATE
SKIP LOCKED)
RETURNING ("ci_pipelines"."id")
SQL
end
end
context 'when running on just the ref' do
let(:before_pipeline) { nil }
it 'produces the expected SQL string' do
expect(subject.squish).to eq <<~SQL.squish
UPDATE
"ci_pipelines"
SET
"locked" = 0
WHERE
"ci_pipelines"."id" IN
(SELECT
"ci_pipelines"."id"
FROM
"ci_pipelines"
WHERE
"ci_pipelines"."ci_ref_id" = #{ci_ref.id}
AND "ci_pipelines"."locked" = 1
LIMIT 1
FOR UPDATE
SKIP LOCKED)
RETURNING
("ci_pipelines"."id")
SQL
end
end
end
describe '#unlock_job_artifacts_query' do
subject { described_class.new(pipeline.project, pipeline.user).unlock_job_artifacts_query(pipeline_ids) }
context 'when running on a ref before a pipeline' do
let(:before_pipeline) { pipeline }
let(:pipeline_ids) { [older_pipeline.id] }
it 'produces the expected SQL string' do
expect(subject.squish).to eq <<~SQL.squish
UPDATE
"ci_job_artifacts"
SET
"locked" = 0
WHERE
"ci_job_artifacts"."job_id" IN
(SELECT
"ci_builds"."id"
FROM
"ci_builds"
WHERE
"ci_builds"."type" = 'Ci::Build'
AND "ci_builds"."commit_id" = #{older_pipeline.id})
RETURNING
("ci_job_artifacts"."id")
SQL
end
end
context 'when running on just the ref' do
let(:before_pipeline) { nil }
let(:pipeline_ids) { [older_pipeline.id, newer_pipeline.id, pipeline.id] }
it 'produces the expected SQL string' do
expect(subject.squish).to eq <<~SQL.squish
UPDATE
"ci_job_artifacts"
SET
"locked" = 0
WHERE
"ci_job_artifacts"."job_id" IN
(SELECT
"ci_builds"."id"
FROM
"ci_builds"
WHERE
"ci_builds"."type" = 'Ci::Build'
AND "ci_builds"."commit_id" IN (#{pipeline_ids.join(', ')}))
RETURNING
("ci_job_artifacts"."id")
SQL
end
end
end
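Both queries above end in RETURNING with a LIMIT of BATCH_SIZE (stubbed to 1 in these specs) plus FOR UPDATE SKIP LOCKED, which implies the service unlocks in a loop until a pass returns no rows. A hedged sketch of that loop, with illustrative names only (the shipped service may structure this differently):

# Hypothetical batching loop implied by the LIMIT/RETURNING shape above.
unlocked_pipeline_ids = []
loop do
  ids = Ci::ApplicationRecord.connection.select_values(unlock_pipelines_query(ci_ref, before_pipeline))
  break if ids.empty?

  unlocked_pipeline_ids.concat(ids)
end

unlocked_artifact_ids =
  Ci::ApplicationRecord.connection.select_values(unlock_job_artifacts_query(unlocked_pipeline_ids))

{ unlocked_pipelines: unlocked_pipeline_ids.size,
  unlocked_job_artifacts: unlocked_artifact_ids.size }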

View File

@ -207,13 +207,5 @@ RSpec.describe Projects::ParticipantsService do
end
it_behaves_like 'return project members'
context 'when feature flag :linear_participants_service_ancestor_scopes is disabled' do
before do
stub_feature_flags(linear_participants_service_ancestor_scopes: false)
end
it_behaves_like 'return project members'
end
end
end

View File

@ -52,7 +52,7 @@ module AccessMatchers
emulate_user(user, @membership)
visit(url)
status_code == 200 && !current_path.in?([new_user_session_path, new_admin_session_path])
[200, 204].include?(status_code) && !current_path.in?([new_user_session_path, new_admin_session_path])
end
chain :of do |membership|

View File

@ -808,7 +808,19 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
create(:ci_job_artifact, :trace, job: build)
end
it { is_expected.to be_truthy }
it 'is truthy' do
is_expected.to be_truthy
end
end
context 'when archived trace record exists but file is not stored' do
before do
create(:ci_job_artifact, :unarchived_trace_artifact, job: build)
end
it 'is falsy' do
is_expected.to be_falsy
end
end
context 'when live trace exists' do
@ -872,13 +884,35 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
expect(stream.read).to eq(trace_raw)
end
end
end
shared_examples 'a pre-commit error' do |error:|
it_behaves_like 'source trace in ChunkedIO stays intact', error: error
it 'does not save the trace artifact' do
expect { subject }.to raise_error(error)
build.reload
expect(build.job_artifacts_trace).to be_nil
end
end
shared_examples 'a post-commit error' do |error:|
it_behaves_like 'source trace in ChunkedIO stays intact', error: error
it 'saves the trace artifact but not the file' do
expect { subject }.to raise_error(error)
build.reload
expect(build.job_artifacts_trace).to be_present
expect(build.job_artifacts_trace.file.exists?).to be_falsy
end
end
context 'when job does not have trace artifact' do
context 'when trace is stored in ChunkedIO' do
let!(:build) { create(:ci_build, :success, :trace_live) }
@ -892,7 +926,7 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: Gitlab::Ci::Trace::ArchiveError
it_behaves_like 'a pre-commit error', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
@ -902,7 +936,16 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: ActiveRecord::RecordInvalid
it_behaves_like 'a pre-commit error', error: ActiveRecord::RecordInvalid
end
context 'when storing the file raises an error' do
before do
stub_artifacts_object_storage(direct_upload: true)
allow_any_instance_of(Ci::JobArtifact).to receive(:store_file!).and_raise(Excon::Error::BadGateway, 'S3 is down lol')
end
it_behaves_like 'a post-commit error', error: Excon::Error::BadGateway
end
end
end

View File

@ -85,7 +85,7 @@ RSpec.shared_examples 'deduplicating jobs when scheduling' do |strategy_name|
allow(fake_duplicate_job).to receive(:scheduled_at).and_return(Time.now + time_diff)
allow(fake_duplicate_job).to receive(:options).and_return({ including_scheduled: true })
allow(fake_duplicate_job).to(
receive(:check!).with(time_diff.to_i).and_return('the jid'))
receive(:check!).with(time_diff.to_i + fake_duplicate_job.duplicate_key_ttl).and_return('the jid'))
allow(fake_duplicate_job).to receive(:idempotent?).and_return(true)
allow(fake_duplicate_job).to receive(:update_latest_wal_location!)
allow(fake_duplicate_job).to receive(:set_deduplicated_flag!)
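The TTL expected by check! now adds the duplicate key TTL on top of the time until the job's scheduled run, so the dedup key outlives the scheduled execution. A worked example with illustrative numbers (the actual value is whatever duplicate_key_ttl returns):

# Assuming the job is scheduled 1 hour out and duplicate_key_ttl is 6 hours:
time_diff.to_i                                       # => 3600
time_diff.to_i + fake_duplicate_job.duplicate_key_ttl # => 3600 + 21600 = 25200 (7 hours)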

View File

@ -4,7 +4,9 @@ require 'spec_helper'
RSpec.describe Ci::RefDeleteUnlockArtifactsWorker do
describe '#perform' do
subject(:perform) { described_class.new.perform(project_id, user_id, ref) }
subject(:perform) { worker.perform(project_id, user_id, ref) }
let(:worker) { described_class.new }
let(:ref) { 'refs/heads/master' }
@ -40,6 +42,36 @@ RSpec.describe Ci::RefDeleteUnlockArtifactsWorker do
expect(service).to have_received(:execute).with(ci_ref)
end
context 'when a locked pipeline with persisted artifacts exists' do
let!(:pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: 'master', project: project, locked: :artifacts_locked) }
context 'with ci_update_unlocked_job_artifacts disabled' do
before do
stub_feature_flags(ci_update_unlocked_job_artifacts: false)
end
it 'logs the correct extra metadata' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_pipelines, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_job_artifacts, 0)
perform
end
end
context 'with ci_update_unlocked_job_artifacts enabled' do
before do
stub_feature_flags(ci_update_unlocked_job_artifacts: true)
end
it 'logs the correct extra metadata' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_pipelines, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_job_artifacts, 2)
perform
end
end
end
end
context 'when ci ref does not exist for the given project' do

View File

@ -37,6 +37,15 @@ RSpec.describe EmailReceiverWorker, :mailer do
expect(email.to).to eq(["jake@adventuretime.ooo"])
expect(email.subject).to include("Rejected")
end
it 'strips out the body before passing to EmailRejectionMailer' do
mail = Mail.new(raw_message)
mail.body = nil
expect(EmailRejectionMailer).to receive(:rejection).with(anything, mail.encoded, anything).and_call_original
described_class.new.perform(raw_message)
end
end
context 'when the error is Gitlab::Email::AutoGeneratedEmailError' do

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe IssueRebalancingWorker do
RSpec.describe IssueRebalancingWorker, :clean_gitlab_redis_shared_state do
describe '#perform' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
@ -35,6 +35,20 @@ RSpec.describe IssueRebalancingWorker do
described_class.new.perform # all arguments are nil
end
it 'does not schedule a new rebalance if it finished under 1h ago' do
container_type = arguments.second.present? ? ::Gitlab::Issues::Rebalancing::State::PROJECT : ::Gitlab::Issues::Rebalancing::State::NAMESPACE
container_id = arguments.second || arguments.third
Gitlab::Redis::SharedState.with do |redis|
redis.set(::Gitlab::Issues::Rebalancing::State.send(:recently_finished_key, container_type, container_id), true)
end
expect(Issues::RelativePositionRebalancingService).not_to receive(:new)
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
described_class.new.perform(*arguments)
end
end
shared_examples 'safely handles non-existent ids' do

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Issues::RescheduleStuckIssueRebalancesWorker, :clean_gitlab_redis_shared_state do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
subject(:worker) { described_class.new }
describe '#perform' do
it 'does not schedule a rebalance' do
expect(IssueRebalancingWorker).not_to receive(:perform_async)
worker.perform
end
it 'schedules a rebalance when there are rebalances in progress' do
expect(::Gitlab::Issues::Rebalancing::State).to receive(:fetch_rebalancing_groups_and_projects).and_return([[group.id], [project.id]])
expect(IssueRebalancingWorker).to receive(:bulk_perform_async).with([[nil, nil, group.id]]).once
expect(IssueRebalancingWorker).to receive(:bulk_perform_async).with([[nil, project.id, nil]]).once
worker.perform
end
end
end
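The argument shapes asserted above ([nil, nil, group_id] for groups, [nil, project_id, nil] for projects) pin the worker down almost completely. A sketch consistent with those expectations (hypothetical, not the shipped code):

# Hypothetical worker body reconstructed from the expectations above.
def perform
  group_ids, project_ids = ::Gitlab::Issues::Rebalancing::State.fetch_rebalancing_groups_and_projects
  return if group_ids.empty? && project_ids.empty?

  IssueRebalancingWorker.bulk_perform_async(group_ids.map { |id| [nil, nil, id] })
  IssueRebalancingWorker.bulk_perform_async(project_ids.map { |id| [nil, id, nil] })
end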