Add latest changes from gitlab-org/gitlab@master
parent f20be8802a
commit c38fb401d2
@ -0,0 +1,232 @@
|
|||
<script>
|
||||
import { GlAlert, GlButton, GlDrawer, GlFormGroup, GlFormInput } from '@gitlab/ui';
|
||||
import { get as getPropValueByPath, isEmpty } from 'lodash';
|
||||
import { produce } from 'immer';
|
||||
import { MountingPortal } from 'portal-vue';
|
||||
import { __ } from '~/locale';
|
||||
import { logError } from '~/lib/logger';
|
||||
import { getFirstPropertyValue } from '~/lib/utils/common_utils';
|
||||
import { INDEX_ROUTE_NAME } from '../constants';
|
||||
|
||||
const MSG_SAVE_CHANGES = __('Save changes');
|
||||
const MSG_ERROR = __('Something went wrong. Please try again.');
|
||||
const MSG_OPTIONAL = __('(optional)');
|
||||
const MSG_CANCEL = __('Cancel');
|
||||
|
||||
/**
|
||||
* This component is a first iteration towards a general reusable Create/Update component
|
||||
*
|
||||
* There's some opportunity to improve cohesion of this module which we are planning
|
||||
* to address after solidifying the abstraction's requirements.
|
||||
*
|
||||
* Please see https://gitlab.com/gitlab-org/gitlab/-/issues/349441
|
||||
*/
|
||||
export default {
|
||||
components: {
|
||||
GlAlert,
|
||||
GlButton,
|
||||
GlDrawer,
|
||||
GlFormGroup,
|
||||
GlFormInput,
|
||||
MountingPortal,
|
||||
},
|
||||
props: {
|
||||
drawerOpen: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
fields: {
|
||||
type: Array,
|
||||
required: true,
|
||||
},
|
||||
title: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
successMessage: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
mutation: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
getQuery: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
getQueryNodePath: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
existingModel: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: () => ({}),
|
||||
},
|
||||
additionalCreateParams: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: () => ({}),
|
||||
},
|
||||
buttonLabel: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: () => MSG_SAVE_CHANGES,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
const initialModel = this.fields.reduce(
|
||||
(map, field) =>
|
||||
Object.assign(map, {
|
||||
[field.name]: this.existingModel ? this.existingModel[field.name] : null,
|
||||
}),
|
||||
{},
|
||||
);
|
||||
|
||||
return {
|
||||
model: initialModel,
|
||||
submitting: false,
|
||||
errorMessages: [],
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
isEditMode() {
|
||||
return this.existingModel?.id;
|
||||
},
|
||||
isInvalid() {
|
||||
const { fields, model } = this;
|
||||
|
||||
return fields.some((field) => {
|
||||
return field.required && isEmpty(model[field.name]);
|
||||
});
|
||||
},
|
||||
variables() {
|
||||
const { additionalCreateParams, fields, isEditMode, model } = this;
|
||||
|
||||
const variables = fields.reduce(
|
||||
(map, field) =>
|
||||
Object.assign(map, {
|
||||
[field.name]: this.formatValue(model, field),
|
||||
}),
|
||||
{},
|
||||
);
|
||||
|
||||
if (isEditMode) {
|
||||
return { input: { id: this.existingModel.id, ...variables } };
|
||||
}
|
||||
|
||||
return { input: { ...additionalCreateParams, ...variables } };
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
formatValue(model, field) {
|
||||
if (!isEmpty(model[field.name]) && field.input?.type === 'number') {
|
||||
return parseFloat(model[field.name]);
|
||||
}
|
||||
|
||||
return model[field.name];
|
||||
},
|
||||
save() {
|
||||
const { mutation, variables, close } = this;
|
||||
|
||||
this.submitting = true;
|
||||
|
||||
return this.$apollo
|
||||
.mutate({
|
||||
mutation,
|
||||
variables,
|
||||
update: (store, { data }) => {
|
||||
const { errors, ...result } = getFirstPropertyValue(data);
|
||||
|
||||
if (errors?.length) {
|
||||
this.errorMessages = errors;
|
||||
} else {
|
||||
this.updateCache(store, result);
|
||||
close(true);
|
||||
}
|
||||
},
|
||||
})
|
||||
.catch((e) => {
|
||||
logError(e);
|
||||
this.errorMessages = [MSG_ERROR];
|
||||
})
|
||||
.finally(() => {
|
||||
this.submitting = false;
|
||||
});
|
||||
},
|
||||
close(success) {
|
||||
if (success) {
|
||||
// This is needed so the toast persists when the route is changed
|
||||
this.$root.$toast.show(this.successMessage);
|
||||
}
|
||||
|
||||
this.$router.replace({ name: this.$options.INDEX_ROUTE_NAME });
|
||||
},
|
||||
updateCache(store, result) {
|
||||
const { getQuery, isEditMode, getQueryNodePath } = this;
|
||||
|
||||
if (isEditMode || !getQuery) return;
|
||||
|
||||
const sourceData = store.readQuery(getQuery);
|
||||
|
||||
const newData = produce(sourceData, (draftState) => {
|
||||
getPropValueByPath(draftState, getQueryNodePath).nodes.push(getFirstPropertyValue(result));
|
||||
});
|
||||
|
||||
store.writeQuery({
|
||||
...getQuery,
|
||||
data: newData,
|
||||
});
|
||||
},
|
||||
getFieldLabel(field) {
|
||||
const optionalSuffix = field.required ? '' : ` ${MSG_OPTIONAL}`;
|
||||
return field.label + optionalSuffix;
|
||||
},
|
||||
},
|
||||
MSG_CANCEL,
|
||||
INDEX_ROUTE_NAME,
|
||||
};
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<mounting-portal mount-to="#js-crm-form-portal" append>
|
||||
<gl-drawer class="gl-drawer-responsive gl-absolute" :open="drawerOpen" @close="close(false)">
|
||||
<template #title>
|
||||
<h3>{{ title }}</h3>
|
||||
</template>
|
||||
<gl-alert v-if="errorMessages.length" variant="danger" @dismiss="errorMessages = []">
|
||||
<ul class="gl-mb-0! gl-ml-5">
|
||||
<li v-for="error in errorMessages" :key="error">
|
||||
{{ error }}
|
||||
</li>
|
||||
</ul>
|
||||
</gl-alert>
|
||||
<form @submit.prevent="save">
|
||||
<gl-form-group
|
||||
v-for="field in fields"
|
||||
:key="field.name"
|
||||
:label="getFieldLabel(field)"
|
||||
:label-for="field.name"
|
||||
>
|
||||
<gl-form-input :id="field.name" v-bind="field.input" v-model="model[field.name]" />
|
||||
</gl-form-group>
|
||||
<span class="gl-float-right">
|
||||
<gl-button data-testid="cancel-button" @click="close(false)">
|
||||
{{ $options.MSG_CANCEL }}
|
||||
</gl-button>
|
||||
<gl-button
|
||||
variant="confirm"
|
||||
:disabled="isInvalid"
|
||||
:loading="submitting"
|
||||
data-testid="save-button"
|
||||
type="submit"
|
||||
>{{ buttonLabel }}</gl-button
|
||||
>
|
||||
</span>
|
||||
</form>
|
||||
</gl-drawer>
|
||||
</mounting-portal>
|
||||
</template>
@ -1,5 +1,6 @@
|
|||
<script>
|
||||
import { mapState } from 'vuex';
|
||||
import { GlBadge } from '@gitlab/ui';
|
||||
import { helpPagePath } from '~/helpers/help_page_helper';
|
||||
import { timeIntervalInWords } from '~/lib/utils/datetime_utility';
|
||||
import { __, sprintf } from '~/locale';
|
||||
|
|
@ -10,6 +11,7 @@ export default {
|
|||
name: 'JobSidebarDetailsContainer',
|
||||
components: {
|
||||
DetailRow,
|
||||
GlBadge,
|
||||
},
|
||||
mixins: [timeagoMixin],
|
||||
computed: {
|
||||
|
|
@ -100,12 +102,7 @@ export default {
|
|||
|
||||
<p v-if="hasTags" class="build-detail-row" data-testid="job-tags">
|
||||
<span class="font-weight-bold">{{ __('Tags:') }}</span>
|
||||
<span
|
||||
v-for="(tag, i) in job.tags"
|
||||
:key="i"
|
||||
class="badge badge-pill badge-primary gl-badge sm"
|
||||
>{{ tag }}</span
|
||||
>
|
||||
<gl-badge v-for="(tag, i) in job.tags" :key="i" variant="info">{{ tag }}</gl-badge>
|
||||
</p>
|
||||
</div>
|
||||
</template>
@ -746,3 +746,12 @@ export const isLoggedIn = () => Boolean(window.gon?.current_user_id);
|
|||
*/
|
||||
export const convertArrayOfObjectsToCamelCase = (array) =>
|
||||
array.map((o) => convertObjectPropsToCamelCase(o));
|
||||
|
||||
export const getFirstPropertyValue = (data) => {
|
||||
if (!data) return null;
|
||||
|
||||
const [key] = Object.keys(data);
|
||||
if (!key) return null;
|
||||
|
||||
return data[key];
|
||||
};
@ -9,9 +9,9 @@ class Admin::RunnerProjectsController < Admin::ApplicationController
|
|||
@runner = Ci::Runner.find(params[:runner_project][:runner_id])
|
||||
|
||||
if @runner.assign_to(@project, current_user)
|
||||
redirect_to admin_runner_path(@runner), notice: s_('Runners|Runner assigned to project.')
|
||||
redirect_to edit_admin_runner_url(@runner), notice: s_('Runners|Runner assigned to project.')
|
||||
else
|
||||
redirect_to admin_runner_path(@runner), alert: 'Failed adding runner to project'
|
||||
redirect_to edit_admin_runner_url(@runner), alert: 'Failed adding runner to project'
|
||||
end
|
||||
end
@ -20,7 +20,7 @@ class Admin::RunnerProjectsController < Admin::ApplicationController
|
|||
runner = rp.runner
|
||||
rp.destroy
|
||||
|
||||
redirect_to admin_runner_path(runner), status: :found, notice: s_('Runners|Runner unassigned from project.')
|
||||
redirect_to edit_admin_runner_url(runner), status: :found, notice: s_('Runners|Runner unassigned from project.')
|
||||
end
|
||||
|
||||
private
@ -11,13 +11,21 @@ class Admin::RunnersController < Admin::ApplicationController
|
|||
end
|
||||
|
||||
def show
|
||||
# We will show runner details in a read-only view in
|
||||
# future iterations. For now, this route will have a
|
||||
# redirect until this new view is developed. See more:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/347856
|
||||
redirect_to edit_admin_runner_path(runner)
|
||||
end
|
||||
|
||||
def edit
|
||||
assign_builds_and_projects
|
||||
end
|
||||
|
||||
def update
|
||||
if Ci::UpdateRunnerService.new(@runner).update(runner_params)
|
||||
respond_to do |format|
|
||||
format.html { redirect_to admin_runner_path(@runner) }
|
||||
format.html { redirect_to edit_admin_runner_path(@runner) }
|
||||
end
|
||||
else
|
||||
assign_builds_and_projects
@ -9,7 +9,11 @@ module Projects
|
|||
|
||||
def show
|
||||
@package = project.packages.find(params[:id])
|
||||
@package_files = @package.package_files.recent
|
||||
@package_files = if Feature.enabled?(:packages_installable_package_files)
|
||||
@package.installable_package_files.recent
|
||||
else
|
||||
@package.package_files.recent
|
||||
end
|
||||
end
|
||||
end
|
||||
end
@ -19,7 +19,11 @@ class Packages::PackageFileFinder
|
|||
private
|
||||
|
||||
def package_files
|
||||
files = package.package_files
|
||||
files = if Feature.enabled?(:packages_installable_package_files)
|
||||
package.installable_package_files
|
||||
else
|
||||
package.package_files
|
||||
end
|
||||
|
||||
by_file_name(files)
|
||||
end
@ -7,15 +7,7 @@ class UserGroupNotificationSettingsFinder
|
|||
end
|
||||
|
||||
def execute
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
selected_groups = Group.where(id: groups.select(:id))
|
||||
groups_with_ancestors = if Feature.enabled?(:linear_user_group_notification_settings_finder_ancestors_scopes, user, default_enabled: :yaml)
|
||||
selected_groups.self_and_ancestors
|
||||
else
|
||||
Gitlab::ObjectHierarchy.new(selected_groups).base_and_ancestors
|
||||
end
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
|
||||
groups_with_ancestors = groups.self_and_ancestors
|
||||
@loaded_groups_with_ancestors = groups_with_ancestors.index_by(&:id)
|
||||
@loaded_notification_settings = user.notification_settings_for_groups(groups_with_ancestors).preload_source_route.index_by(&:source_id)
@ -24,6 +24,14 @@ module Types
|
|||
def versions
|
||||
object.versions
|
||||
end
|
||||
|
||||
def package_files
|
||||
if Feature.enabled?(:packages_installable_package_files)
|
||||
object.installable_package_files
|
||||
else
|
||||
object.package_files
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
@ -96,6 +96,15 @@ module Packages
|
|||
architectures.pluck(:name).sort
|
||||
end
|
||||
|
||||
def package_files
|
||||
if Feature.enabled?(:packages_installable_package_files)
|
||||
::Packages::PackageFile.installable
|
||||
.for_package_ids(packages.select(:id))
|
||||
else
|
||||
::Packages::PackageFile.for_package_ids(packages.select(:id))
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def unique_codename_and_suite
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Packages
|
||||
# This module requires a status column.
|
||||
# It also requires a constant INSTALLABLE_STATUSES. This should be
|
||||
# an array that defines which values of the status column are
|
||||
# considered as installable.
|
||||
module Installable
|
||||
extend ActiveSupport::Concern
|
||||
|
||||
included do
|
||||
scope :with_status, ->(status) { where(status: status) }
|
||||
scope :installable, -> { with_status(const_get(:INSTALLABLE_STATUSES, false)) }
|
||||
end
|
||||
end
|
||||
end
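For reference, a minimal sketch of how a model would opt into this concern; the class name and enum values below are illustrative only, while the real adopters are the Packages::Package and Packages::PackageFile changes further down in this commit.

# Illustrative example only: any ActiveRecord model with a status column can
# include the concern and declare which statuses count as installable.
class ExamplePackageRecord < ApplicationRecord
  include Packages::Installable

  INSTALLABLE_STATUSES = [:default, :hidden].freeze

  enum status: { default: 0, hidden: 1, pending_destruction: 2 }
end

# ExamplePackageRecord.installable
# #=> records whose status is :default or :hidden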
@ -12,8 +12,4 @@ class Packages::Debian::GroupDistribution < ApplicationRecord
|
|||
.for_projects(group.all_projects.public_only)
|
||||
.with_debian_codename(codename)
|
||||
end
|
||||
|
||||
def package_files
|
||||
::Packages::PackageFile.for_package_ids(packages.select(:id))
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,5 +9,4 @@ class Packages::Debian::ProjectDistribution < ApplicationRecord
|
|||
|
||||
has_many :publications, class_name: 'Packages::Debian::Publication', inverse_of: :distribution, foreign_key: :distribution_id
|
||||
has_many :packages, class_name: 'Packages::Package', through: :publications
|
||||
has_many :package_files, class_name: 'Packages::PackageFile', through: :packages
|
||||
end
@ -5,6 +5,7 @@ class Packages::Package < ApplicationRecord
|
|||
include Gitlab::SQL::Pattern
|
||||
include UsageStatistics
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
include Packages::Installable
|
||||
|
||||
DISPLAYABLE_STATUSES = [:default, :error].freeze
|
||||
INSTALLABLE_STATUSES = [:default, :hidden].freeze
|
||||
|
|
@ -31,6 +32,9 @@ class Packages::Package < ApplicationRecord
|
|||
|
||||
# package_files must be destroyed by ruby code in order to properly remove carrierwave uploads and update project statistics
|
||||
has_many :package_files, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
|
||||
# TODO: put the installable default scope on the :package_files association once the dependent: :destroy is removed
|
||||
# See https://gitlab.com/gitlab-org/gitlab/-/issues/349191
|
||||
has_many :installable_package_files, -> { installable }, class_name: 'Packages::PackageFile', inverse_of: :package
|
||||
has_many :dependency_links, inverse_of: :package, class_name: 'Packages::DependencyLink'
|
||||
has_many :tags, inverse_of: :package, class_name: 'Packages::Tag'
|
||||
has_one :conan_metadatum, inverse_of: :package, class_name: 'Packages::Conan::Metadatum'
|
||||
|
|
@ -100,9 +104,7 @@ class Packages::Package < ApplicationRecord
|
|||
scope :without_version_like, -> (version) { where.not(arel_table[:version].matches(version)) }
|
||||
scope :with_package_type, ->(package_type) { where(package_type: package_type) }
|
||||
scope :without_package_type, ->(package_type) { where.not(package_type: package_type) }
|
||||
scope :with_status, ->(status) { where(status: status) }
|
||||
scope :displayable, -> { with_status(DISPLAYABLE_STATUSES) }
|
||||
scope :installable, -> { with_status(INSTALLABLE_STATUSES) }
|
||||
scope :including_project_route, -> { includes(project: { namespace: :route }) }
|
||||
scope :including_tags, -> { includes(:tags) }
|
||||
scope :including_dependency_links, -> { includes(dependency_links: :dependency) }
|
||||
|
|
@ -131,7 +133,7 @@ class Packages::Package < ApplicationRecord
|
|||
scope :without_nuget_temporary_name, -> { where.not(name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) }
|
||||
|
||||
scope :has_version, -> { where.not(version: nil) }
|
||||
scope :preload_files, -> { preload(:package_files) }
|
||||
scope :preload_files, -> { Feature.enabled?(:packages_installable_package_files) ? preload(:installable_package_files) : preload(:package_files) }
|
||||
scope :preload_pipelines, -> { preload(pipelines: :user) }
|
||||
scope :last_of_each_version, -> { where(id: all.select('MAX(id) AS id').group(:version)) }
|
||||
scope :limit_recent, ->(limit) { order_created_desc.limit(limit) }
@ -2,12 +2,17 @@
|
|||
class Packages::PackageFile < ApplicationRecord
|
||||
include UpdateProjectStatistics
|
||||
include FileStoreMounter
|
||||
include Packages::Installable
|
||||
|
||||
INSTALLABLE_STATUSES = [:default].freeze
|
||||
|
||||
delegate :project, :project_id, to: :package
|
||||
delegate :conan_file_type, to: :conan_file_metadatum
|
||||
delegate :file_type, :dsc?, :component, :architecture, :fields, to: :debian_file_metadatum, prefix: :debian
|
||||
delegate :channel, :metadata, to: :helm_file_metadatum, prefix: :helm
|
||||
|
||||
enum status: { default: 0, pending_destruction: 1 }
|
||||
|
||||
belongs_to :package
|
||||
|
||||
# used to move the linked file within object storage
|
||||
|
|
@ -48,9 +53,12 @@ class Packages::PackageFile < ApplicationRecord
|
|||
end
|
||||
|
||||
scope :for_helm_with_channel, ->(project, channel) do
|
||||
joins(:package).merge(project.packages.helm.installable)
|
||||
.joins(:helm_file_metadatum)
|
||||
.where(packages_helm_file_metadata: { channel: channel })
|
||||
result = joins(:package)
|
||||
.merge(project.packages.helm.installable)
|
||||
.joins(:helm_file_metadatum)
|
||||
.where(packages_helm_file_metadata: { channel: channel })
|
||||
result = result.installable if Feature.enabled?(:packages_installable_package_files)
|
||||
result
|
||||
end
|
||||
|
||||
scope :with_conan_file_type, ->(file_type) do
|
||||
|
|
@ -94,14 +102,19 @@ class Packages::PackageFile < ApplicationRecord
|
|||
skip_callback :commit, :after, :remove_previously_stored_file, if: :execute_move_in_object_storage?
|
||||
after_commit :move_in_object_storage, if: :execute_move_in_object_storage?
|
||||
|
||||
# Returns the most recent package files for *each* of the given packages.
|
||||
# Returns the most recent installable package file for *each* of the given packages.
|
||||
# The order is not guaranteed.
|
||||
def self.most_recent_for(packages, extra_join: nil, extra_where: nil)
|
||||
cte_name = :packages_cte
|
||||
cte = Gitlab::SQL::CTE.new(cte_name, packages.select(:id))
|
||||
|
||||
package_files = ::Packages::PackageFile.limit_recent(1)
|
||||
.where(arel_table[:package_id].eq(Arel.sql("#{cte_name}.id")))
|
||||
package_files = if Feature.enabled?(:packages_installable_package_files)
|
||||
::Packages::PackageFile.installable.limit_recent(1)
|
||||
.where(arel_table[:package_id].eq(Arel.sql("#{cte_name}.id")))
|
||||
else
|
||||
::Packages::PackageFile.limit_recent(1)
|
||||
.where(arel_table[:package_id].eq(Arel.sql("#{cte_name}.id")))
|
||||
end
|
||||
|
||||
package_files = package_files.joins(extra_join) if extra_join
|
||||
package_files = package_files.where(extra_where) if extra_where
@ -80,7 +80,13 @@ module Packages
|
|||
def package_files
|
||||
return unless @package
|
||||
|
||||
@package_files ||= @package.package_files.preload_conan_file_metadata
|
||||
strong_memoize(:package_files) do
|
||||
if Feature.enabled?(:packages_installable_package_files)
|
||||
@package.installable_package_files.preload_conan_file_metadata
|
||||
else
|
||||
@package.package_files.preload_conan_file_metadata
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def matching_reference?(package_file)
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ module Packages
|
|||
id: @package.id,
|
||||
created_at: @package.created_at,
|
||||
name: name,
|
||||
package_files: @package.package_files.map { |pf| build_package_file_view(pf) },
|
||||
package_files: package_file_views,
|
||||
package_type: @package.package_type,
|
||||
status: @package.status,
|
||||
project_id: @package.project_id,
|
||||
|
|
@ -38,6 +38,16 @@ module Packages
|
|||
|
||||
private
|
||||
|
||||
def package_file_views
|
||||
package_files = if Feature.enabled?(:packages_installable_package_files)
|
||||
@package.installable_package_files
|
||||
else
|
||||
@package.package_files
|
||||
end
|
||||
|
||||
package_files.map { |pf| build_package_file_view(pf) }
|
||||
end
|
||||
|
||||
def build_package_file_view(package_file)
|
||||
file_view = {
|
||||
created_at: package_file.created_at,
|
||||
|
|
|
|||
|
|
@ -26,7 +26,11 @@ module Packages
|
|||
.preload_npm_metadatum
|
||||
|
||||
batched_packages.each do |package|
|
||||
package_file = package.package_files.last
|
||||
package_file = if Feature.enabled?(:packages_installable_package_files)
|
||||
package.installable_package_files.last
|
||||
else
|
||||
package.package_files.last
|
||||
end
|
||||
|
||||
next unless package_file
|
||||
|
||||
|
|
|
|||
|
|
@ -27,12 +27,19 @@ module Packages
|
|||
end
|
||||
|
||||
def archive_url_for(package)
|
||||
package_files = if Feature.enabled?(:packages_installable_package_files)
|
||||
package.installable_package_files
|
||||
else
|
||||
package.package_files
|
||||
end
|
||||
|
||||
package_filename = package_files.with_format(NUGET_PACKAGE_FORMAT).last&.file_name
|
||||
path = api_v4_projects_packages_nuget_download_package_name_package_version_package_filename_path(
|
||||
{
|
||||
id: package.project_id,
|
||||
package_name: package.name,
|
||||
package_version: package.version,
|
||||
package_filename: package.package_files.with_format(NUGET_PACKAGE_FORMAT).last&.file_name
|
||||
package_filename: package_filename
|
||||
},
|
||||
true
|
||||
)
|
||||
|
|
|
|||
|
|
@ -36,7 +36,13 @@ module Packages
|
|||
refs = []
|
||||
|
||||
@packages.map do |package|
|
||||
package.package_files.each do |file|
|
||||
package_files = if Feature.enabled?(:packages_installable_package_files)
|
||||
package.installable_package_files
|
||||
else
|
||||
package.package_files
|
||||
end
|
||||
|
||||
package_files.each do |file|
|
||||
url = build_pypi_package_path(file)
|
||||
|
||||
refs << package_link(url, package.pypi_metadatum.required_python, file.file_name)
|
||||
|
|
|
|||
|
|
@ -93,10 +93,15 @@ module Packages
|
|||
def metadata_package_file_for(package)
|
||||
return unless package
|
||||
|
||||
package.package_files
|
||||
.with_file_name(Metadata.filename)
|
||||
.recent
|
||||
.first
|
||||
package_files = if Feature.enabled?(:packages_installable_package_files)
|
||||
package.installable_package_files
|
||||
else
|
||||
package.package_files
|
||||
end
|
||||
|
||||
package_files.with_file_name(Metadata.filename)
|
||||
.recent
|
||||
.first
|
||||
end
|
||||
|
||||
def versionless_package_named(name)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
- add_page_specific_style 'page_bundles/ci_status'
|
||||
|
||||
- breadcrumb_title @runner.short_sha
|
||||
- page_title "##{@runner.id} (#{@runner.short_sha})"
|
||||
- title = "##{@runner.id} (#{@runner.short_sha})"
|
||||
- breadcrumb_title title
|
||||
- page_title title
|
||||
- add_to_breadcrumbs _('Runners'), admin_runners_path
|
||||
|
||||
#js-runner-details{ data: {runner_id: @runner.id} }
|
||||
|
|
@ -37,7 +38,7 @@
|
|||
|
||||
%tr
|
||||
%td
|
||||
= form_tag admin_runner_path(@runner), id: 'runner-projects-search', class: 'form-inline', method: :get do
|
||||
= form_tag edit_admin_runner_path(@runner), id: 'runner-projects-search', class: 'form-inline', method: :get do
|
||||
.input-group
|
||||
= search_field_tag :search, params[:search], class: 'form-control gl-form-input', spellcheck: false
|
||||
.input-group-append
|
||||
|
|
@ -92,7 +92,7 @@
|
|||
%h4.text-center
|
||||
= Gitlab.config.gitlab.no_todos_messages.sample
|
||||
%p
|
||||
= (_s("Todos|Are you looking for things to do? Take a look at %{strongStart}%{openIssuesLinkStart}open issues%{openIssuesLinkEnd}%{strongEnd}, contribute to %{strongStart}%{mergeRequestLinkStart}a merge request%{mergeRequestLinkEnd}%{mergeRequestLinkEnd}%{strongEnd}, or mention someone in a comment to automatically assign them a new to-do item.") % { strongStart: '<strong>', strongEnd: '</strong>', openIssuesLinkStart: "<a href=\"#{issues_dashboard_path}\">", openIssuesLinkEnd: '</a>', mergeRequestLinkStart: "<a href=\"#{merge_requests_dashboard_path}\">", mergeRequestLinkEnd: '</a>' }).html_safe
|
||||
= (s_("Todos|Are you looking for things to do? Take a look at %{strongStart}%{openIssuesLinkStart}open issues%{openIssuesLinkEnd}%{strongEnd}, contribute to %{strongStart}%{mergeRequestLinkStart}a merge request%{mergeRequestLinkEnd}%{mergeRequestLinkEnd}%{strongEnd}, or mention someone in a comment to automatically assign them a new to-do item.") % { strongStart: '<strong>', strongEnd: '</strong>', openIssuesLinkStart: "<a href=\"#{issues_dashboard_path}\">", openIssuesLinkEnd: '</a>', mergeRequestLinkStart: "<a href=\"#{merge_requests_dashboard_path}\">", mergeRequestLinkEnd: '</a>' }).html_safe
|
||||
- else
|
||||
%h4.text-center
|
||||
= s_("Todos|Nothing is on your to-do list. Nice work!")
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
---
|
||||
name: linear_user_group_notification_settings_finder_ancestors_scopes
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74606
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/345792
|
||||
name: packages_installable_package_files
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/76767
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/348677
|
||||
milestone: '14.6'
|
||||
type: development
|
||||
group: group::access
|
||||
group: group::package
|
||||
default_enabled: false
|
||||
|
|
@ -159,7 +159,7 @@ namespace :admin do
|
|||
|
||||
resources :labels
|
||||
|
||||
resources :runners, only: [:index, :show, :update, :destroy] do
|
||||
resources :runners, only: [:index, :show, :edit, :update, :destroy] do
|
||||
member do
|
||||
post :resume
|
||||
post :pause
@ -0,0 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddStatusToPackagesPackageFiles < Gitlab::Database::Migration[1.0]
|
||||
def change
|
||||
add_column :packages_package_files, :status, :smallint, default: 0, null: false
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddStatusIndexToPackagesPackageFiles < Gitlab::Database::Migration[1.0]
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'index_packages_package_files_on_package_id_status_and_id'
|
||||
|
||||
def up
|
||||
add_concurrent_index :packages_package_files, [:package_id, :status, :id], name: INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index_by_name :packages_package_files, name: INDEX_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
fccb1d6c7ac4e31cecaf7bc2e23f13f6c8147a3820cbd996a545a5b01cc03865
|
||||
|
|
@ -0,0 +1 @@
|
|||
deec24bae35829454a09d4e97478c0b57d5f80e3271f96b2554b1ab10dc84d7f
|
||||
|
|
@ -17425,6 +17425,7 @@ CREATE TABLE packages_package_files (
|
|||
verification_checksum bytea,
|
||||
verification_state smallint DEFAULT 0 NOT NULL,
|
||||
verification_started_at timestamp with time zone,
|
||||
status smallint DEFAULT 0 NOT NULL,
|
||||
CONSTRAINT check_4c5e6bb0b3 CHECK ((file_store IS NOT NULL))
|
||||
);
|
||||
|
||||
|
|
@ -26921,6 +26922,8 @@ CREATE INDEX index_packages_package_files_on_package_id_and_file_name ON package
|
|||
|
||||
CREATE INDEX index_packages_package_files_on_package_id_id ON packages_package_files USING btree (package_id, id);
|
||||
|
||||
CREATE INDEX index_packages_package_files_on_package_id_status_and_id ON packages_package_files USING btree (package_id, status, id);
|
||||
|
||||
CREATE INDEX index_packages_package_files_on_verification_state ON packages_package_files USING btree (verification_state);
|
||||
|
||||
CREATE INDEX index_packages_packages_on_creator_id ON packages_packages USING btree (creator_id);
@ -170,6 +170,28 @@ Helm chart](https://gitlab.com/gitlab-org/charts/gitlab/), itself deployed with
|
|||
|
||||
See [Review Apps](../review_apps.md) for more details about Review Apps.
|
||||
|
||||
## Test metrics
|
||||
|
||||
For additional test health visibility, use a custom setup to export test execution
|
||||
results to your [InfluxDb](https://influxdb.quality.gitlab.net/) instance, and visualize
|
||||
results as [Grafana](https://dashboards.quality.gitlab.net/) dashboards.
|
||||
|
||||
### Provisioning
|
||||
|
||||
Provisioning of all components is performed by the
|
||||
[`engineering-productivity-infrastructure`](https://gitlab.com/gitlab-org/quality/engineering-productivity-infrastructure) project.
|
||||
|
||||
### Exporting metrics in CI
|
||||
|
||||
Use these environment variables to configure metrics export:
|
||||
|
||||
| Variable | Required | Information |
|
||||
| -------- | -------- | ----------- |
|
||||
| `QA_INFLUXDB_URL` | `true` | Should be set to `https://influxdb.quality.gitlab.net`. No default value. |
|
||||
| `QA_INFLUXDB_TOKEN` | `true` | InfluxDB write token that can be found under `Influxdb auth tokens` document in `Gitlab-QA` `1Password` vault. No default value. |
|
||||
| `QA_RUN_TYPE` | `false` | Arbitrary name for test execution, like `package-and-qa`. Automatically inferred from the project name for live environment test executions. No default value. |
|
||||
| `QA_EXPORT_TEST_METRICS` | `false` | Flag to enable or disable metrics export. Defaults to `true`. |
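As an illustrative sketch only (the job name, stage, and script are placeholders, not part of the documented setup), a job could opt into metrics export along these lines, with `QA_INFLUXDB_TOKEN` supplied as a masked CI/CD variable rather than written into the file:

```yaml
qa-with-metrics:           # hypothetical job name
  stage: test
  variables:
    QA_INFLUXDB_URL: "https://influxdb.quality.gitlab.net"
    QA_RUN_TYPE: "package-and-qa"
    QA_EXPORT_TEST_METRICS: "true"
  script:
    - echo "run the QA suite here"   # placeholder for the actual test invocation
```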
## Test reports
|
||||
|
||||
### Allure report
|
||||
|
|
|
|||
|
|
@ -28,10 +28,15 @@ module API
|
|||
package = ::Packages::PackageFinder
|
||||
.new(user_project, params[:package_id]).execute
|
||||
|
||||
files = package.package_files
|
||||
.preload_pipelines
|
||||
package_files = if Feature.enabled?(:packages_installable_package_files)
|
||||
package.installable_package_files
|
||||
else
|
||||
package.package_files
|
||||
end
|
||||
|
||||
present paginate(files), with: ::API::Entities::PackageFile
|
||||
package_files = package_files.preload_pipelines
|
||||
|
||||
present paginate(package_files), with: ::API::Entities::PackageFile
|
||||
end
|
||||
|
||||
desc 'Remove a package file' do
|
||||
|
|
@ -50,7 +55,13 @@ module API
|
|||
|
||||
not_found! unless package
|
||||
|
||||
package_file = package.package_files.find_by_id(params[:package_file_id])
|
||||
package_files = if Feature.enabled?(:packages_installable_package_files)
|
||||
package.installable_package_files
|
||||
else
|
||||
package.package_files
|
||||
end
|
||||
|
||||
package_file = package_files.find_by_id(params[:package_file_id])
|
||||
|
||||
not_found! unless package_file
|
||||
|
||||
|
|
|
|||
|
|
@ -66,9 +66,12 @@ module API
|
|||
get "gems/:file_name", requirements: FILE_NAME_REQUIREMENTS do
|
||||
authorize!(:read_package, user_project)
|
||||
|
||||
package_file = ::Packages::PackageFile.for_rubygem_with_file_name(
|
||||
user_project, params[:file_name]
|
||||
).last!
|
||||
package_files = ::Packages::PackageFile
|
||||
.for_rubygem_with_file_name(user_project, params[:file_name])
|
||||
|
||||
package_files = package_files.installable if Feature.enabled?(:packages_installable_package_files)
|
||||
|
||||
package_file = package_files.last!
|
||||
|
||||
track_package_event('pull_package', :rubygems, project: user_project, namespace: user_project.namespace)
|
||||
|
||||
|
|
|
|||
|
|
@ -71,7 +71,11 @@ module API
|
|||
|
||||
def package_file
|
||||
strong_memoize(:package_file) do
|
||||
package.package_files.first
|
||||
if Feature.enabled?(:packages_installable_package_files)
|
||||
package.installable_package_files.first
|
||||
else
|
||||
package.package_files.first
|
||||
end
|
||||
end
|
||||
end
|
||||
end
@ -16,19 +16,6 @@ module Backup
|
|||
end
|
||||
end
|
||||
|
||||
class RepositoryBackupError < Backup::Error
|
||||
attr_reader :container, :backup_repos_path
|
||||
|
||||
def initialize(container, backup_repos_path)
|
||||
@container = container
|
||||
@backup_repos_path = backup_repos_path
|
||||
end
|
||||
|
||||
def message
|
||||
"Failed to create compressed file '#{backup_repos_path}' when trying to backup the following paths: '#{container.disk_path}'"
|
||||
end
|
||||
end
|
||||
|
||||
class DatabaseBackupError < Backup::Error
|
||||
attr_reader :config, :db_file_name
|
||||
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@ module Backup
|
|||
report_success(success)
|
||||
progress.flush
|
||||
|
||||
raise Backup::Error, 'Backup failed' unless success
|
||||
raise DatabaseBackupError.new(config, db_file_name) unless success
|
||||
end
|
||||
|
||||
def restore
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ module Backup
|
|||
|
||||
unless status == 0
|
||||
puts output
|
||||
raise Backup::Error, 'Backup failed'
|
||||
raise_custom_error
|
||||
end
|
||||
|
||||
tar_cmd = [tar, exclude_dirs(:tar), %W[-C #{@backup_files_dir} -cf - .]].flatten
|
||||
|
|
@ -49,7 +49,7 @@ module Backup
|
|||
end
|
||||
|
||||
unless pipeline_succeeded?(tar_status: status_list[0], gzip_status: status_list[1], output: output)
|
||||
raise Backup::Error, "Backup operation failed: #{output}"
|
||||
raise_custom_error
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -143,5 +143,9 @@ module Backup
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
def raise_custom_error
|
||||
raise FileBackupError.new(app_files_dir, backup_tarball)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -22,6 +22,18 @@ ci_job_token_project_scope_links:
|
|||
- table: users
|
||||
column: added_by_id
|
||||
on_delete: async_nullify
|
||||
ci_daily_build_group_report_results:
|
||||
- table: namespaces
|
||||
column: group_id
|
||||
on_delete: async_delete
|
||||
ci_pending_builds:
|
||||
- table: namespaces
|
||||
column: namespace_id
|
||||
on_delete: async_delete
|
||||
ci_runner_namespaces:
|
||||
- table: namespaces
|
||||
column: namespace_id
|
||||
on_delete: async_delete
|
||||
ci_namespace_mirrors:
|
||||
- table: namespaces
|
||||
column: namespace_id
@ -135,8 +135,12 @@ namespace :gitlab do
|
|||
if ENV["SKIP"] && ENV["SKIP"].include?("db")
|
||||
puts_time "[SKIPPED]".color(:cyan)
|
||||
else
|
||||
Backup::Database.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
begin
|
||||
Backup::Database.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
rescue Backup::DatabaseBackupError => e
|
||||
progress.puts "#{e.message}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -168,8 +172,12 @@ namespace :gitlab do
|
|||
if ENV["SKIP"] && ENV["SKIP"].include?("builds")
|
||||
puts_time "[SKIPPED]".color(:cyan)
|
||||
else
|
||||
Backup::Builds.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
begin
|
||||
Backup::Builds.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
rescue Backup::FileBackupError => e
|
||||
progress.puts "#{e.message}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -187,8 +195,12 @@ namespace :gitlab do
|
|||
if ENV["SKIP"] && ENV["SKIP"].include?("uploads")
|
||||
puts_time "[SKIPPED]".color(:cyan)
|
||||
else
|
||||
Backup::Uploads.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
begin
|
||||
Backup::Uploads.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
rescue Backup::FileBackupError => e
|
||||
progress.puts "#{e.message}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -206,8 +218,12 @@ namespace :gitlab do
|
|||
if ENV["SKIP"] && ENV["SKIP"].include?("artifacts")
|
||||
puts_time "[SKIPPED]".color(:cyan)
|
||||
else
|
||||
Backup::Artifacts.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
begin
|
||||
Backup::Artifacts.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
rescue Backup::FileBackupError => e
|
||||
progress.puts "#{e.message}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -225,8 +241,12 @@ namespace :gitlab do
|
|||
if ENV["SKIP"] && ENV["SKIP"].include?("pages")
|
||||
puts_time "[SKIPPED]".color(:cyan)
|
||||
else
|
||||
Backup::Pages.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
begin
|
||||
Backup::Pages.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
rescue Backup::FileBackupError => e
|
||||
progress.puts "#{e.message}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -244,8 +264,12 @@ namespace :gitlab do
|
|||
if ENV["SKIP"] && ENV["SKIP"].include?("lfs")
|
||||
puts_time "[SKIPPED]".color(:cyan)
|
||||
else
|
||||
Backup::Lfs.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
begin
|
||||
Backup::Lfs.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
rescue Backup::FileBackupError => e
|
||||
progress.puts "#{e.message}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -283,8 +307,12 @@ namespace :gitlab do
|
|||
if ENV["SKIP"] && ENV["SKIP"].include?("registry")
|
||||
puts_time "[SKIPPED]".color(:cyan)
|
||||
else
|
||||
Backup::Registry.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
begin
|
||||
Backup::Registry.new(progress).dump
|
||||
puts_time "done".color(:green)
|
||||
rescue Backup::FileBackupError => e
|
||||
progress.puts "#{e.message}"
|
||||
end
|
||||
end
|
||||
else
|
||||
puts_time "[DISABLED]".color(:cyan)
|
||||
|
|
|
|||
|
|
@ -1160,6 +1160,9 @@ msgstr ""
|
|||
msgid "(max size 15 MB)"
|
||||
msgstr ""
|
||||
|
||||
msgid "(optional)"
|
||||
msgstr ""
|
||||
|
||||
msgid "(removed)"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -37161,6 +37164,9 @@ msgstr ""
|
|||
msgid "Today"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|Are you looking for things to do? Take a look at %{strongStart}%{openIssuesLinkStart}open issues%{openIssuesLinkEnd}%{strongEnd}, contribute to %{strongStart}%{mergeRequestLinkStart}a merge request%{mergeRequestLinkEnd}%{mergeRequestLinkEnd}%{strongEnd}, or mention someone in a comment to automatically assign them a new to-do item."
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|Filter by author"
|
||||
msgstr ""
@ -0,0 +1,59 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Admin::RunnerProjectsController do
|
||||
let_it_be(:group) { create(:group) }
|
||||
let_it_be(:project) { create(:project, group: group) }
|
||||
|
||||
before do
|
||||
sign_in(create(:admin))
|
||||
end
|
||||
|
||||
describe '#create' do
|
||||
let(:project_id) { project.path }
|
||||
|
||||
subject do
|
||||
post :create, params: {
|
||||
namespace_id: group.path,
|
||||
project_id: project_id,
|
||||
runner_project: { runner_id: project_runner.id }
|
||||
}
|
||||
end
|
||||
|
||||
context 'assigning runner to same project' do
|
||||
let(:project_runner) { create(:ci_runner, :project, projects: [project]) }
|
||||
|
||||
it 'redirects to the admin runner edit page' do
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:redirect)
|
||||
expect(response).to redirect_to edit_admin_runner_url(project_runner)
|
||||
end
|
||||
end
|
||||
|
||||
context 'assigning runner to another project' do
|
||||
let(:project_runner) { create(:ci_runner, :project, projects: [source_project]) }
|
||||
let(:source_project) { create(:project) }
|
||||
|
||||
it 'redirects to the admin runner edit page' do
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:redirect)
|
||||
expect(response).to redirect_to edit_admin_runner_url(project_runner)
|
||||
end
|
||||
end
|
||||
|
||||
context 'for unknown project' do
|
||||
let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
|
||||
|
||||
let(:project_id) { 0 }
|
||||
|
||||
it 'shows 404 for unknown project' do
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
@ -25,6 +25,23 @@ RSpec.describe Admin::RunnersController do
|
|||
describe '#show' do
|
||||
render_views
|
||||
|
||||
let_it_be(:project) { create(:project) }
|
||||
|
||||
before_all do
|
||||
create(:ci_build, runner: runner, project: project)
|
||||
end
|
||||
|
||||
it 'redirects to the runner edit page' do
|
||||
get :show, params: { id: runner.id }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:redirect)
|
||||
expect(response).to redirect_to edit_admin_runner_path(runner)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#edit' do
|
||||
render_views
|
||||
|
||||
let_it_be(:project) { create(:project) }
|
||||
let_it_be(:project_two) { create(:project) }
|
||||
|
||||
|
|
@ -33,29 +50,29 @@ RSpec.describe Admin::RunnersController do
|
|||
create(:ci_build, runner: runner, project: project_two)
|
||||
end
|
||||
|
||||
it 'shows a particular runner' do
|
||||
get :show, params: { id: runner.id }
|
||||
it 'shows a runner edit page' do
|
||||
get :edit, params: { id: runner.id }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
|
||||
it 'shows 404 for unknown runner' do
|
||||
get :show, params: { id: 0 }
|
||||
get :edit, params: { id: 0 }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
||||
it 'avoids N+1 queries', :request_store do
|
||||
get :show, params: { id: runner.id }
|
||||
get :edit, params: { id: runner.id }
|
||||
|
||||
control_count = ActiveRecord::QueryRecorder.new { get :show, params: { id: runner.id } }.count
|
||||
control_count = ActiveRecord::QueryRecorder.new { get :edit, params: { id: runner.id } }.count
|
||||
|
||||
new_project = create(:project)
|
||||
create(:ci_build, runner: runner, project: new_project)
|
||||
|
||||
# There is one additional query looking up subject.group in ProjectPolicy for the
|
||||
# needs_new_sso_session permission
|
||||
expect { get :show, params: { id: runner.id } }.not_to exceed_query_limit(control_count + 1)
|
||||
expect { get :edit, params: { id: runner.id } }.not_to exceed_query_limit(control_count + 1)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -41,5 +41,29 @@ RSpec.describe Projects::Packages::InfrastructureRegistryController do
|
|||
|
||||
it_behaves_like 'returning response status', :not_found
|
||||
end
|
||||
|
||||
context 'with package file pending destruction' do
|
||||
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: terraform_module) }
|
||||
|
||||
let(:terraform_module_package_file) { terraform_module.package_files.first }
|
||||
|
||||
it 'does not return them' do
|
||||
subject
|
||||
|
||||
expect(assigns(:package_files)).to contain_exactly(terraform_module_package_file)
|
||||
end
|
||||
|
||||
context 'with packages_installable_package_files disabled' do
|
||||
before do
|
||||
stub_feature_flags(packages_installable_package_files: false)
|
||||
end
|
||||
|
||||
it 'returns them' do
|
||||
subject
|
||||
|
||||
expect(assigns(:package_files)).to contain_exactly(package_file_pending_destruction, terraform_module_package_file)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -6,6 +6,8 @@ FactoryBot.define do
|
|||
|
||||
file_name { 'somefile.txt' }
|
||||
|
||||
status { :default }
|
||||
|
||||
transient do
|
||||
file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
|
||||
end
|
||||
|
|
@ -14,6 +16,10 @@ FactoryBot.define do
|
|||
package_file.file = fixture_file_upload(evaluator.file_fixture)
|
||||
end
|
||||
|
||||
trait :pending_destruction do
|
||||
status { :pending_destruction }
|
||||
end
|
||||
|
||||
factory :conan_package_file do
|
||||
package { association(:conan_package, without_package_files: true) }
@ -449,19 +449,21 @@ RSpec.describe "Admin Runners" do
|
|||
end
|
||||
end
|
||||
|
||||
describe "Runner show page" do
|
||||
describe "Runner edit page" do
|
||||
let(:runner) { create(:ci_runner) }
|
||||
|
||||
before do
|
||||
@project1 = create(:project)
|
||||
@project2 = create(:project)
|
||||
visit admin_runner_path(runner)
|
||||
visit edit_admin_runner_path(runner)
|
||||
|
||||
wait_for_requests
|
||||
end
|
||||
|
||||
describe 'runner page breadcrumbs' do
|
||||
it 'contains the current runner token' do
|
||||
it 'contains the current runner id and token' do
|
||||
page.within '[data-testid="breadcrumb-links"]' do
|
||||
expect(page.find('h2')).to have_content(runner.short_sha)
|
||||
expect(page.find('h2')).to have_content("##{runner.id} (#{runner.short_sha})")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -510,7 +512,7 @@ RSpec.describe "Admin Runners" do
|
|||
let(:runner) { create(:ci_runner, :project, projects: [@project1]) }
|
||||
|
||||
before do
|
||||
visit admin_runner_path(runner)
|
||||
visit edit_admin_runner_path(runner)
|
||||
end
|
||||
|
||||
it_behaves_like 'assignable runner'
|
||||
|
|
@ -520,7 +522,7 @@ RSpec.describe "Admin Runners" do
|
|||
let(:runner) { create(:ci_runner, :project, projects: [@project1], locked: true) }
|
||||
|
||||
before do
|
||||
visit admin_runner_path(runner)
|
||||
visit edit_admin_runner_path(runner)
|
||||
end
|
||||
|
||||
it_behaves_like 'assignable runner'
|
||||
|
|
@ -531,7 +533,7 @@ RSpec.describe "Admin Runners" do
|
|||
|
||||
before do
|
||||
@project1.destroy!
|
||||
visit admin_runner_path(runner)
|
||||
visit edit_admin_runner_path(runner)
|
||||
end
|
||||
|
||||
it_behaves_like 'assignable runner'
|
||||
|
|
@ -542,7 +544,7 @@ RSpec.describe "Admin Runners" do
|
|||
let(:runner) { create(:ci_runner, :project, projects: [@project1]) }
|
||||
|
||||
before do
|
||||
visit admin_runner_path(runner)
|
||||
visit edit_admin_runner_path(runner)
|
||||
end
|
||||
|
||||
it 'removed specific runner from project' do
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ RSpec.describe ::Packages::Conan::PackageFileFinder do
|
|||
let(:package_file_name) { package_file.file_name }
|
||||
let(:params) { {} }
|
||||
|
||||
RSpec.shared_examples 'package file finder examples' do
|
||||
shared_examples 'package file finder examples' do
|
||||
it { is_expected.to eq(package_file) }
|
||||
|
||||
context 'with conan_file_type' do
|
||||
|
|
@ -39,11 +39,37 @@ RSpec.describe ::Packages::Conan::PackageFileFinder do
|
|||
end
|
||||
end
|
||||
|
||||
shared_examples 'not returning pending_destruction package files' do
|
||||
let_it_be(:recent_package_file_pending_destruction) do
|
||||
create(:package_file, :pending_destruction, package: package, file_name: package_file.file_name)
|
||||
end
|
||||
|
||||
it 'returns the correct package file' do
|
||||
expect(package.package_files.last).to eq(recent_package_file_pending_destruction)
|
||||
|
||||
expect(subject).to eq(package_file)
|
||||
end
|
||||
|
||||
context 'with packages_installable_package_files disabled' do
|
||||
before do
|
||||
stub_feature_flags(packages_installable_package_files: false)
|
||||
end
|
||||
|
||||
it 'returns the correct package file' do
|
||||
expect(package.package_files.last).to eq(recent_package_file_pending_destruction)
|
||||
|
||||
expect(subject).to eq(recent_package_file_pending_destruction)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#execute' do
|
||||
subject { described_class.new(package, package_file_name, params).execute }
|
||||
|
||||
it_behaves_like 'package file finder examples'
|
||||
|
||||
it_behaves_like 'not returning pending_destruction package files'
|
||||
|
||||
context 'with unknown file_name' do
|
||||
let(:package_file_name) { 'unknown.jpg' }
|
||||
|
||||
|
|
@ -56,6 +82,8 @@ RSpec.describe ::Packages::Conan::PackageFileFinder do
|
|||
|
||||
it_behaves_like 'package file finder examples'
|
||||
|
||||
it_behaves_like 'not returning pending_destruction package files'
|
||||
|
||||
context 'with unknown file_name' do
|
||||
let(:package_file_name) { 'unknown.jpg' }
|
||||
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ RSpec.describe Packages::PackageFileFinder do
|
|||
let(:package_file_name) { package_file.file_name }
|
||||
let(:params) { {} }
|
||||
|
||||
RSpec.shared_examples 'package file finder examples' do
|
||||
shared_examples 'package file finder examples' do
|
||||
it { is_expected.to eq(package_file) }
|
||||
|
||||
context 'with file_name_like' do
|
||||
|
|
@ -19,11 +19,35 @@ RSpec.describe Packages::PackageFileFinder do
|
|||
end
|
||||
end
|
||||
|
||||
shared_examples 'not returning pending_destruction package files' do
|
||||
let_it_be(:recent_package_file_pending_destruction) do
|
||||
create(:package_file, :pending_destruction, package: package, file_name: package_file.file_name)
|
||||
end
|
||||
|
||||
it 'returns the correct package file' do
|
||||
expect(package.package_files.last).to eq(recent_package_file_pending_destruction)
|
||||
|
||||
expect(subject).to eq(package_file)
|
||||
end
|
||||
|
||||
context 'with packages_installable_package_files disabled' do
|
||||
before do
|
||||
stub_feature_flags(packages_installable_package_files: false)
|
||||
end
|
||||
|
||||
it 'returns them' do
|
||||
expect(subject).to eq(recent_package_file_pending_destruction)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#execute' do
|
||||
subject { described_class.new(package, package_file_name, params).execute }
|
||||
|
||||
it_behaves_like 'package file finder examples'
|
||||
|
||||
it_behaves_like 'not returning pending_destruction package files'
|
||||
|
||||
context 'with unknown file_name' do
|
||||
let(:package_file_name) { 'unknown.jpg' }
|
||||
|
||||
|
|
@ -36,6 +60,8 @@ RSpec.describe Packages::PackageFileFinder do
|
|||
|
||||
it_behaves_like 'package file finder examples'
|
||||
|
||||
it_behaves_like 'not returning pending_destruction package files'
|
||||
|
||||
context 'with unknown file_name' do
|
||||
let(:package_file_name) { 'unknown.jpg' }
@ -11,167 +11,155 @@ RSpec.describe UserGroupNotificationSettingsFinder do
|
|||
subject.map(&proc).uniq
|
||||
end
|
||||
|
||||
shared_examples 'user group notifications settings tests' do
|
||||
context 'when the groups have no existing notification settings' do
|
||||
context 'when the groups have no ancestors' do
|
||||
let_it_be(:groups) { create_list(:group, 3) }
|
||||
context 'when the groups have no existing notification settings' do
|
||||
context 'when the groups have no ancestors' do
|
||||
let_it_be(:groups) { create_list(:group, 3) }
|
||||
|
||||
it 'will be a default Global notification setting', :aggregate_failures do
|
||||
expect(subject.count).to eq(3)
|
||||
expect(attributes(&:notification_email)).to match_array([nil])
|
||||
expect(attributes(&:level)).to match_array(['global'])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the groups have ancestors' do
|
||||
context 'when an ancestor has a level other than Global' do
|
||||
let_it_be(:ancestor_a) { create(:group) }
|
||||
let_it_be(:group_a) { create(:group, parent: ancestor_a) }
|
||||
let_it_be(:ancestor_b) { create(:group) }
|
||||
let_it_be(:group_b) { create(:group, parent: ancestor_b) }
|
||||
let_it_be(:email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) }
|
||||
|
||||
let_it_be(:groups) { [group_a, group_b] }
|
||||
|
||||
before do
|
||||
create(:notification_setting, user: user, source: ancestor_a, level: 'participating', notification_email: email.email)
|
||||
create(:notification_setting, user: user, source: ancestor_b, level: 'participating', notification_email: email.email)
|
||||
end
|
||||
|
||||
it 'has the same level set' do
|
||||
expect(attributes(&:level)).to match_array(['participating'])
|
||||
end
|
||||
|
||||
it 'has the same email set' do
|
||||
expect(attributes(&:notification_email)).to match_array(['ancestor@example.com'])
|
||||
end
|
||||
|
||||
it 'only returns the two queried groups' do
|
||||
expect(subject.count).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when an ancestor has a Global level but has an email set' do
|
||||
let_it_be(:grand_ancestor) { create(:group) }
|
||||
let_it_be(:ancestor) { create(:group, parent: grand_ancestor) }
|
||||
let_it_be(:group) { create(:group, parent: ancestor) }
|
||||
let_it_be(:ancestor_email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) }
|
||||
let_it_be(:grand_email) { create(:email, :confirmed, email: 'grand@example.com', user: user) }
|
||||
|
||||
let_it_be(:groups) { [group] }
|
||||
|
||||
before do
|
||||
create(:notification_setting, user: user, source: grand_ancestor, level: 'participating', notification_email: grand_email.email)
|
||||
create(:notification_setting, user: user, source: ancestor, level: 'global', notification_email: ancestor_email.email)
|
||||
end
|
||||
|
||||
it 'has the same email and level set', :aggregate_failures do
|
||||
expect(subject.count).to eq(1)
|
||||
expect(attributes(&:level)).to match_array(['global'])
|
||||
expect(attributes(&:notification_email)).to match_array(['ancestor@example.com'])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the group has parent_id set but that does not belong to any group' do
|
||||
let_it_be(:group) { create(:group) }
|
||||
let_it_be(:groups) { [group] }
|
||||
|
||||
before do
|
||||
# Let's set a parent_id for a group that definitely doesn't exist
|
||||
group.update_columns(parent_id: 19283746)
|
||||
end
|
||||
|
||||
it 'returns a default Global notification setting' do
|
||||
expect(subject.count).to eq(1)
|
||||
expect(attributes(&:level)).to match_array(['global'])
expect(attributes(&:notification_email)).to match_array([nil])
end
end

context 'when the group has a private parent' do
let_it_be(:ancestor) { create(:group, :private) }
let_it_be(:group) { create(:group, :private, parent: ancestor) }
let_it_be(:ancestor_email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) }
let_it_be(:groups) { [group] }

before do
group.add_reporter(user)
# Adding the user creates a NotificationSetting, so we remove it here
user.notification_settings.where(source: group).delete_all

create(:notification_setting, user: user, source: ancestor, level: 'participating', notification_email: ancestor_email.email)
end

it 'still inherits the notification settings' do
expect(subject.count).to eq(1)
expect(attributes(&:level)).to match_array(['participating'])
expect(attributes(&:notification_email)).to match_array([ancestor_email.email])
end
end

it 'does not cause an N+1', :aggregate_failures do
parent = create(:group)
child = create(:group, parent: parent)

control = ActiveRecord::QueryRecorder.new do
described_class.new(user, Group.where(id: child.id)).execute
end

other_parent = create(:group)
other_children = create_list(:group, 2, parent: other_parent)

result = nil

expect do
result = described_class.new(user, Group.where(id: other_children.append(child).map(&:id))).execute
end.not_to exceed_query_limit(control)

expect(result.count).to eq(3)
end
it 'will be a default Global notification setting', :aggregate_failures do
expect(subject.count).to eq(3)
expect(attributes(&:notification_email)).to match_array([nil])
expect(attributes(&:level)).to match_array(['global'])
end
end

context 'preloading `emails_disabled`' do
let_it_be(:root_group) { create(:group) }
let_it_be(:sub_group) { create(:group, parent: root_group) }
let_it_be(:sub_sub_group) { create(:group, parent: sub_group) }
context 'when the groups have ancestors' do
context 'when an ancestor has a level other than Global' do
let_it_be(:ancestor_a) { create(:group) }
let_it_be(:group_a) { create(:group, parent: ancestor_a) }
let_it_be(:ancestor_b) { create(:group) }
let_it_be(:group_b) { create(:group, parent: ancestor_b) }
let_it_be(:email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) }

let_it_be(:another_root_group) { create(:group) }
let_it_be(:sub_group_with_emails_disabled) { create(:group, emails_disabled: true, parent: another_root_group) }
let_it_be(:another_sub_sub_group) { create(:group, parent: sub_group_with_emails_disabled) }
let_it_be(:groups) { [group_a, group_b] }

let_it_be(:root_group_with_emails_disabled) { create(:group, emails_disabled: true) }
let_it_be(:group) { create(:group, parent: root_group_with_emails_disabled) }

let(:groups) { Group.where(id: [sub_sub_group, another_sub_sub_group, group]) }

before do
described_class.new(user, groups).execute
end

it 'preloads the `group.emails_disabled` method' do
recorder = ActiveRecord::QueryRecorder.new do
groups.each(&:emails_disabled?)
before do
create(:notification_setting, user: user, source: ancestor_a, level: 'participating', notification_email: email.email)
create(:notification_setting, user: user, source: ancestor_b, level: 'participating', notification_email: email.email)
end

expect(recorder.count).to eq(0)
it 'has the same level set' do
expect(attributes(&:level)).to match_array(['participating'])
end

it 'has the same email set' do
expect(attributes(&:notification_email)).to match_array(['ancestor@example.com'])
end

it 'only returns the two queried groups' do
expect(subject.count).to eq(2)
end
end

it 'preloads the `group.emails_disabled` method correctly' do
groups.each do |group|
expect(group.emails_disabled?).to eq(Group.find(group.id).emails_disabled?) # compare the memoized and the freshly loaded value
context 'when an ancestor has a Global level but has an email set' do
let_it_be(:grand_ancestor) { create(:group) }
let_it_be(:ancestor) { create(:group, parent: grand_ancestor) }
let_it_be(:group) { create(:group, parent: ancestor) }
let_it_be(:ancestor_email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) }
let_it_be(:grand_email) { create(:email, :confirmed, email: 'grand@example.com', user: user) }

let_it_be(:groups) { [group] }

before do
create(:notification_setting, user: user, source: grand_ancestor, level: 'participating', notification_email: grand_email.email)
create(:notification_setting, user: user, source: ancestor, level: 'global', notification_email: ancestor_email.email)
end

it 'has the same email and level set', :aggregate_failures do
expect(subject.count).to eq(1)
expect(attributes(&:level)).to match_array(['global'])
expect(attributes(&:notification_email)).to match_array(['ancestor@example.com'])
end
end

context 'when the group has parent_id set but that does not belong to any group' do
let_it_be(:group) { create(:group) }
let_it_be(:groups) { [group] }

before do
# Let's set a parent_id for a group that definitely doesn't exist
group.update_columns(parent_id: 19283746)
end

it 'returns a default Global notification setting' do
expect(subject.count).to eq(1)
expect(attributes(&:level)).to match_array(['global'])
expect(attributes(&:notification_email)).to match_array([nil])
end
end

context 'when the group has a private parent' do
let_it_be(:ancestor) { create(:group, :private) }
let_it_be(:group) { create(:group, :private, parent: ancestor) }
let_it_be(:ancestor_email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) }
let_it_be(:groups) { [group] }

before do
group.add_reporter(user)
# Adding the user creates a NotificationSetting, so we remove it here
user.notification_settings.where(source: group).delete_all

create(:notification_setting, user: user, source: ancestor, level: 'participating', notification_email: ancestor_email.email)
end

it 'still inherits the notification settings' do
expect(subject.count).to eq(1)
expect(attributes(&:level)).to match_array(['participating'])
expect(attributes(&:notification_email)).to match_array([ancestor_email.email])
end
end

it 'does not cause an N+1', :aggregate_failures do
parent = create(:group)
child = create(:group, parent: parent)

control = ActiveRecord::QueryRecorder.new do
described_class.new(user, Group.where(id: child.id)).execute
end

other_parent = create(:group)
other_children = create_list(:group, 2, parent: other_parent)

result = nil

expect do
result = described_class.new(user, Group.where(id: other_children.append(child).map(&:id))).execute
end.not_to exceed_query_limit(control)

expect(result.count).to eq(3)
end
end
end

it_behaves_like 'user group notifications settings tests'
context 'preloading `emails_disabled`' do
let_it_be(:root_group) { create(:group) }
let_it_be(:sub_group) { create(:group, parent: root_group) }
let_it_be(:sub_sub_group) { create(:group, parent: sub_group) }

let_it_be(:another_root_group) { create(:group) }
let_it_be(:sub_group_with_emails_disabled) { create(:group, emails_disabled: true, parent: another_root_group) }
let_it_be(:another_sub_sub_group) { create(:group, parent: sub_group_with_emails_disabled) }

let_it_be(:root_group_with_emails_disabled) { create(:group, emails_disabled: true) }
let_it_be(:group) { create(:group, parent: root_group_with_emails_disabled) }

let(:groups) { Group.where(id: [sub_sub_group, another_sub_sub_group, group]) }

context 'when feature flag :linear_user_group_notification_settings_finder_ancestors_scopes is disabled' do
before do
stub_feature_flags(linear_user_group_notification_settings_finder_ancestors_scopes: false)
described_class.new(user, groups).execute
end

it_behaves_like 'user group notifications settings tests'
it 'preloads the `group.emails_disabled` method' do
recorder = ActiveRecord::QueryRecorder.new do
groups.each(&:emails_disabled?)
end

expect(recorder.count).to eq(0)
end

it 'preloads the `group.emails_disabled` method correctly' do
groups.each do |group|
expect(group.emails_disabled?).to eq(Group.find(group.id).emails_disabled?) # compare the memoized and the freshly loaded value
end
end
end
end
@@ -112,7 +112,7 @@ describe('Customer relations contact form component', () => {
await waitForPromises();

expect(findError().exists()).toBe(true);
expect(findError().text()).toBe('Phone is invalid.');
expect(findError().text()).toBe('create contact is invalid.');
});
});

@@ -151,7 +151,7 @@ describe('Customer relations contact form component', () => {
await waitForPromises();

expect(findError().exists()).toBe(true);
expect(findError().text()).toBe('Email is invalid.');
expect(findError().text()).toBe('update contact is invalid.');
});
});
});
@@ -0,0 +1,278 @@
import { GlAlert } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import VueRouter from 'vue-router';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Form from '~/crm/components/form.vue';
import routes from '~/crm/routes';
import createContactMutation from '~/crm/components/queries/create_contact.mutation.graphql';
import updateContactMutation from '~/crm/components/queries/update_contact.mutation.graphql';
import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql';
import createOrganizationMutation from '~/crm/components/queries/create_organization.mutation.graphql';
import getGroupOrganizationsQuery from '~/crm/components/queries/get_group_organizations.query.graphql';
import {
createContactMutationErrorResponse,
createContactMutationResponse,
getGroupContactsQueryResponse,
updateContactMutationErrorResponse,
updateContactMutationResponse,
createOrganizationMutationErrorResponse,
createOrganizationMutationResponse,
getGroupOrganizationsQueryResponse,
} from './mock_data';

const FORM_CREATE_CONTACT = 'create contact';
const FORM_UPDATE_CONTACT = 'update contact';
const FORM_CREATE_ORG = 'create organization';

describe('Reusable form component', () => {
Vue.use(VueApollo);
Vue.use(VueRouter);

const DEFAULT_RESPONSES = {
createContact: Promise.resolve(createContactMutationResponse),
updateContact: Promise.resolve(updateContactMutationResponse),
createOrg: Promise.resolve(createOrganizationMutationResponse),
};

let wrapper;
let handler;
let fakeApollo;
let router;

beforeEach(() => {
router = new VueRouter({
base: '',
mode: 'history',
routes,
});
router.push('/test');

handler = jest.fn().mockImplementation((key) => DEFAULT_RESPONSES[key]);

const hanlderWithKey = (key) => (...args) => handler(key, ...args);

fakeApollo = createMockApollo([
[createContactMutation, hanlderWithKey('createContact')],
[updateContactMutation, hanlderWithKey('updateContact')],
[createOrganizationMutation, hanlderWithKey('createOrg')],
]);

fakeApollo.clients.defaultClient.cache.writeQuery({
query: getGroupContactsQuery,
variables: { groupFullPath: 'flightjs' },
data: getGroupContactsQueryResponse.data,
});

fakeApollo.clients.defaultClient.cache.writeQuery({
query: getGroupOrganizationsQuery,
variables: { groupFullPath: 'flightjs' },
data: getGroupOrganizationsQueryResponse.data,
});
});

const mockToastShow = jest.fn();

const findSaveButton = () => wrapper.findByTestId('save-button');
const findForm = () => wrapper.find('form');
const findError = () => wrapper.findComponent(GlAlert);

const mountComponent = (propsData) => {
wrapper = shallowMountExtended(Form, {
router,
apolloProvider: fakeApollo,
propsData: { drawerOpen: true, ...propsData },
mocks: {
$toast: {
show: mockToastShow,
},
},
});
};

const mountContact = ({ propsData } = {}) => {
mountComponent({
fields: [
{ name: 'firstName', label: 'First name', required: true },
{ name: 'lastName', label: 'Last name', required: true },
{ name: 'email', label: 'Email', required: true },
{ name: 'phone', label: 'Phone' },
{ name: 'description', label: 'Description' },
],
...propsData,
});
};

const mountContactCreate = () => {
const propsData = {
title: 'New contact',
successMessage: 'Contact has been added',
buttonLabel: 'Create contact',
getQuery: {
query: getGroupContactsQuery,
variables: { groupFullPath: 'flightjs' },
},
getQueryNodePath: 'group.contacts',
mutation: createContactMutation,
additionalCreateParams: { groupId: 'gid://gitlab/Group/26' },
};
mountContact({ propsData });
};

const mountContactUpdate = () => {
const propsData = {
title: 'Edit contact',
successMessage: 'Contact has been updated',
mutation: updateContactMutation,
existingModel: {
id: 'gid://gitlab/CustomerRelations::Contact/12',
firstName: 'First',
lastName: 'Last',
email: 'email@example.com',
},
};
mountContact({ propsData });
};

const mountOrganization = ({ propsData } = {}) => {
mountComponent({
fields: [
{ name: 'name', label: 'Name', required: true },
{ name: 'defaultRate', label: 'Default rate', input: { type: 'number', step: '0.01' } },
{ name: 'description', label: 'Description' },
],
...propsData,
});
};

const mountOrganizationCreate = () => {
const propsData = {
title: 'New organization',
successMessage: 'Organization has been added',
buttonLabel: 'Create organization',
getQuery: {
query: getGroupOrganizationsQuery,
variables: { groupFullPath: 'flightjs' },
},
getQueryNodePath: 'group.organizations',
mutation: createOrganizationMutation,
additionalCreateParams: { groupId: 'gid://gitlab/Group/26' },
};
mountOrganization({ propsData });
};

const forms = {
[FORM_CREATE_CONTACT]: {
mountFunction: mountContactCreate,
mutationErrorResponse: createContactMutationErrorResponse,
toastMessage: 'Contact has been added',
},
[FORM_UPDATE_CONTACT]: {
mountFunction: mountContactUpdate,
mutationErrorResponse: updateContactMutationErrorResponse,
toastMessage: 'Contact has been updated',
},
[FORM_CREATE_ORG]: {
mountFunction: mountOrganizationCreate,
mutationErrorResponse: createOrganizationMutationErrorResponse,
toastMessage: 'Organization has been added',
},
};
const asTestParams = (...keys) => keys.map((name) => [name, forms[name]]);

afterEach(() => {
wrapper.destroy();
});

describe.each(asTestParams(FORM_CREATE_CONTACT, FORM_UPDATE_CONTACT))(
'%s form save button',
(name, { mountFunction }) => {
beforeEach(() => {
mountFunction();
});

it('should be disabled when required fields are empty', async () => {
wrapper.find('#firstName').vm.$emit('input', '');
await waitForPromises();

expect(findSaveButton().props('disabled')).toBe(true);
});

it('should not be disabled when required fields have values', async () => {
wrapper.find('#firstName').vm.$emit('input', 'A');
wrapper.find('#lastName').vm.$emit('input', 'B');
wrapper.find('#email').vm.$emit('input', 'C');
await waitForPromises();

expect(findSaveButton().props('disabled')).toBe(false);
});
},
);

describe.each(asTestParams(FORM_CREATE_ORG))('%s form save button', (name, { mountFunction }) => {
beforeEach(() => {
mountFunction();
});

it('should be disabled when required field is empty', async () => {
wrapper.find('#name').vm.$emit('input', '');
await waitForPromises();

expect(findSaveButton().props('disabled')).toBe(true);
});

it('should not be disabled when required field has a value', async () => {
wrapper.find('#name').vm.$emit('input', 'A');
await waitForPromises();

expect(findSaveButton().props('disabled')).toBe(false);
});
});

describe.each(asTestParams(FORM_CREATE_CONTACT, FORM_UPDATE_CONTACT, FORM_CREATE_ORG))(
'when %s mutation is successful',
(name, { mountFunction, toastMessage }) => {
it('form should display correct toast message', async () => {
mountFunction();

findForm().trigger('submit');
await waitForPromises();

expect(mockToastShow).toHaveBeenCalledWith(toastMessage);
});
},
);

describe.each(asTestParams(FORM_CREATE_CONTACT, FORM_UPDATE_CONTACT, FORM_CREATE_ORG))(
'when %s mutation fails',
(formName, { mutationErrorResponse, mountFunction }) => {
beforeEach(() => {
jest.spyOn(console, 'error').mockImplementation();
});

it('should show error on reject', async () => {
handler.mockRejectedValue('ERROR');

mountFunction();

findForm().trigger('submit');
await waitForPromises();

expect(findError().text()).toBe('Something went wrong. Please try again.');
});

it('should show error on error response', async () => {
handler.mockResolvedValue(mutationErrorResponse);

mountFunction();

findForm().trigger('submit');
await waitForPromises();

expect(findError().text()).toBe(`${formName} is invalid.`);
});
},
);
});
@@ -82,7 +82,6 @@ export const getGroupOrganizationsQueryResponse = {
export const createContactMutationResponse = {
data: {
customerRelationsContactCreate: {
__typeName: 'CustomerRelationsContactCreatePayload',
contact: {
__typename: 'CustomerRelationsContact',
id: 'gid://gitlab/CustomerRelations::Contact/1',

@@ -102,7 +101,7 @@ export const createContactMutationErrorResponse = {
data: {
customerRelationsContactCreate: {
contact: null,
errors: ['Phone is invalid.'],
errors: ['create contact is invalid.'],
},
},
};

@@ -130,7 +129,7 @@ export const updateContactMutationErrorResponse = {
data: {
customerRelationsContactUpdate: {
contact: null,
errors: ['Email is invalid.'],
errors: ['update contact is invalid.'],
},
},
};

@@ -138,7 +137,6 @@ export const updateContactMutationErrorResponse = {
export const createOrganizationMutationResponse = {
data: {
customerRelationsOrganizationCreate: {
__typeName: 'CustomerRelationsOrganizationCreatePayload',
organization: {
__typename: 'CustomerRelationsOrganization',
id: 'gid://gitlab/CustomerRelations::Organization/2',

@@ -155,7 +153,7 @@ export const createOrganizationMutationErrorResponse = {
data: {
customerRelationsOrganizationCreate: {
organization: null,
errors: ['Name cannot be blank.'],
errors: ['create organization is invalid.'],
},
},
};
@@ -103,7 +103,7 @@ describe('Customer relations organizations root app', () => {
await waitForPromises();

expect(findError().exists()).toBe(true);
expect(findError().text()).toBe('Name cannot be blank.');
expect(findError().text()).toBe('create organization is invalid.');
});
});
});
@@ -3,14 +3,31 @@
require 'spec_helper'

RSpec.describe API::Entities::Ci::Pipeline do
let_it_be(:pipeline) { create(:ci_empty_pipeline) }
let_it_be(:user) { create(:user) }
let_it_be(:pipeline) { create(:ci_empty_pipeline, user: user) }
let_it_be(:job) { create(:ci_build, name: "rspec", coverage: 30.212, pipeline: pipeline) }

let(:entity) { described_class.new(pipeline) }

subject { entity.as_json }

it 'returns the coverage as a string' do
exposed_fields = %i[before_sha tag yaml_errors created_at updated_at started_at finished_at committed_at duration queued_duration]

exposed_fields.each do |field|
it "exposes pipeline #{field}" do
expect(subject[field]).to eq(pipeline.public_send(field))
end
end

it 'exposes pipeline user basic information' do
expect(subject[:user].keys).to include(:avatar_url, :web_url)
end

it 'exposes pipeline detailed status' do
expect(subject[:detailed_status].keys).to include(:icon, :favicon)
end

it 'exposes pipeline coverage as a string' do
expect(subject[:coverage]).to eq '30.21'
end
end
@@ -134,7 +134,7 @@ RSpec.describe Backup::Files do

expect do
subject.dump
end.to raise_error(/Backup operation failed:/)
end.to raise_error(/Failed to create compressed file/)
end

describe 'with STRATEGY=copy' do

@@ -170,7 +170,7 @@ RSpec.describe Backup::Files do
expect do
subject.dump
end.to output(/rsync failed/).to_stdout
.and raise_error(/Backup failed/)
.and raise_error(/Failed to create compressed file/)
end
end
end
@@ -1,42 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Backup::RepositoryBackupError do
let_it_be(:snippet) { create(:snippet, content: 'foo', file_name: 'foo') }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:wiki) { ProjectWiki.new(project, nil ) }

let(:backup_repos_path) { '/tmp/backup/repositories' }

shared_examples 'includes backup path' do
it { is_expected.to respond_to :container }
it { is_expected.to respond_to :backup_repos_path }

it 'expects exception message to include repo backup path location' do
expect(subject.message).to include("#{subject.backup_repos_path}")
end

it 'expects exception message to include container being back-up' do
expect(subject.message).to include("#{subject.container.disk_path}")
end
end

context 'with snippet repository' do
subject { described_class.new(snippet, backup_repos_path) }

it_behaves_like 'includes backup path'
end

context 'with project repository' do
subject { described_class.new(project, backup_repos_path) }

it_behaves_like 'includes backup path'
end

context 'with wiki repository' do
subject { described_class.new(wiki, backup_repos_path) }

it_behaves_like 'includes backup path'
end
end
@@ -10,6 +10,8 @@ RSpec.describe Packages::PackageFile, type: :model do
let_it_be(:package_file3) { create(:package_file, :xml, file_name: 'formatted.zip') }
let_it_be(:debian_package) { create(:debian_package, project: project) }

it_behaves_like 'having unique enum values'

describe 'relationships' do
it { is_expected.to belong_to(:package) }
it { is_expected.to have_one(:conan_file_metadatum) }

@@ -138,6 +140,24 @@ RSpec.describe Packages::PackageFile, type: :model do
it 'returns the matching file only for Helm packages' do
expect(described_class.for_helm_with_channel(project, channel)).to contain_exactly(helm_file2)
end

context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:helm_package_file, :pending_destruction, package: helm_package2, channel: channel) }

it 'does not return them' do
expect(described_class.for_helm_with_channel(project, channel)).to contain_exactly(helm_file2)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
expect(described_class.for_helm_with_channel(project, channel)).to contain_exactly(helm_file2, package_file_pending_destruction)
end
end
end
end

describe '.most_recent!' do

@@ -154,15 +174,17 @@ RSpec.describe Packages::PackageFile, type: :model do

let_it_be(:package_file3_2) { create(:package_file, :npm, package: package3) }
let_it_be(:package_file3_3) { create(:package_file, :npm, package: package3) }
let_it_be(:package_file3_4) { create(:package_file, :npm, :pending_destruction, package: package3) }

let_it_be(:package_file4_2) { create(:package_file, :npm, package: package2) }
let_it_be(:package_file4_3) { create(:package_file, :npm, package: package2) }
let_it_be(:package_file4_4) { create(:package_file, :npm, package: package2) }
let_it_be(:package_file4_4) { create(:package_file, :npm, :pending_destruction, package: package2) }

let(:most_recent_package_file1) { package1.package_files.recent.first }
let(:most_recent_package_file2) { package2.package_files.recent.first }
let(:most_recent_package_file3) { package3.package_files.recent.first }
let(:most_recent_package_file4) { package4.package_files.recent.first }
let(:most_recent_package_file1) { package1.installable_package_files.recent.first }
let(:most_recent_package_file2) { package2.installable_package_files.recent.first }
let(:most_recent_package_file3) { package3.installable_package_files.recent.first }
let(:most_recent_package_file4) { package4.installable_package_files.recent.first }

subject { described_class.most_recent_for(packages) }

@@ -202,6 +224,24 @@ RSpec.describe Packages::PackageFile, type: :model do
it 'returns the most recent package for the selected channel' do
expect(subject).to contain_exactly(helm_package_file2)
end

context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:helm_package_file, :pending_destruction, package: helm_package, channel: 'alpha') }

it 'does not return them' do
expect(subject).to contain_exactly(helm_package_file2)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
expect(subject).to contain_exactly(package_file_pending_destruction)
end
end
end
end
end

@@ -314,4 +354,25 @@ RSpec.describe Packages::PackageFile, type: :model do
end
end
end

context 'status scopes' do
let_it_be(:package) { create(:package) }
let_it_be(:default_package_file) { create(:package_file, package: package) }
let_it_be(:pending_destruction_package_file) { create(:package_file, :pending_destruction, package: package) }

describe '.installable' do
subject { package.installable_package_files }

it 'does not include non-displayable packages', :aggregate_failures do
is_expected.to include(default_package_file)
is_expected.not_to include(pending_destruction_package_file)
end
end

describe '.with_status' do
subject { described_class.with_status(:pending_destruction) }

it { is_expected.to contain_exactly(pending_destruction_package_file) }
end
end
end
@@ -9,6 +9,7 @@ RSpec.describe ::Packages::Conan::PackagePresenter do
let_it_be(:conan_package_reference) { '123456789'}

let(:params) { { package_scope: :instance } }
let(:presenter) { described_class.new(package, user, project, params) }

shared_examples 'no existing package' do
context 'when package does not exist' do

@@ -21,7 +22,7 @@ RSpec.describe ::Packages::Conan::PackagePresenter do
shared_examples 'conan_file_metadatum is not found' do
context 'when no conan_file_metadatum exists' do
before do
package.package_files.each do |file|
package.installable_package_files.each do |file|
file.conan_file_metadatum.delete
file.reload
end

@@ -32,7 +33,7 @@ RSpec.describe ::Packages::Conan::PackagePresenter do
end

describe '#recipe_urls' do
subject { described_class.new(package, user, project, params).recipe_urls }
subject { presenter.recipe_urls }

it_behaves_like 'no existing package'
it_behaves_like 'conan_file_metadatum is not found'

@@ -71,7 +72,9 @@ RSpec.describe ::Packages::Conan::PackagePresenter do
end

describe '#recipe_snapshot' do
subject { described_class.new(package, user, project).recipe_snapshot }
let(:params) { {} }

subject { presenter.recipe_snapshot }

it_behaves_like 'no existing package'
it_behaves_like 'conan_file_metadatum is not found'

@@ -180,12 +183,9 @@ RSpec.describe ::Packages::Conan::PackagePresenter do

describe '#package_snapshot' do
let(:reference) { conan_package_reference }
let(:params) { { conan_package_reference: reference } }

subject do
described_class.new(
package, user, project, conan_package_reference: reference
).package_snapshot
end
subject { presenter.package_snapshot }

it_behaves_like 'no existing package'
it_behaves_like 'conan_file_metadatum is not found'

@@ -208,4 +208,22 @@ RSpec.describe ::Packages::Conan::PackagePresenter do
end
end
end

# TODO when cleaning up packages_installable_package_files, consider removing this context and
# add a dummy package file pending destruction on L8
context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package) }

subject { presenter.send(:package_files).to_a }

it { is_expected.not_to include(package_file_pending_destruction) }

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it { is_expected.to include(package_file_pending_destruction) }
end
end
end
@@ -6,12 +6,12 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, creator: user) }
let_it_be(:package) { create(:npm_package, :with_build, project: project) }
let(:presenter) { described_class.new(package) }

let_it_be(:user_info) { { name: user.name, avatar_url: user.avatar_url } }

let(:presenter) { described_class.new(package) }

let!(:expected_package_files) do
package.package_files.map do |file|
package.installable_package_files.map do |file|
{
created_at: file.created_at,
download_path: file.download_path,

@@ -154,5 +154,21 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
expect(presenter.detail_view).to eq expected_package_details
end
end

context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package) }

subject { presenter.detail_view[:package_files].map { |e| e[:id] } }

it { is_expected.not_to include(package_file_pending_destruction.id) }

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it { is_expected.to include(package_file_pending_destruction.id) }
end
end
end
end
@@ -95,6 +95,26 @@ RSpec.describe ::Packages::Npm::PackagePresenter do
end
end
end

context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package2, file_sha1: 'pending_destruction_sha1') }

let(:shasums) { subject.values.map { |v| v.dig(:dist, :shasum) } }

it 'does not return them' do
expect(shasums).not_to include(package_file_pending_destruction.file_sha1)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
expect(shasums).to include(package_file_pending_destruction.file_sha1)
end
end
end
end

describe '#dist_tags' do
@@ -24,6 +24,20 @@ RSpec.describe Packages::Nuget::PackageMetadataPresenter do
subject { presenter.archive_url }

it { is_expected.to end_with(expected_suffix) }

context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package, file_name: 'pending_destruction.nupkg') }

it { is_expected.not_to include('pending_destruction.nupkg') }

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it { is_expected.to include('pending_destruction.nupkg') }
end
end
end

describe '#catalog_entry' do
@@ -52,5 +52,21 @@ RSpec.describe ::Packages::Pypi::PackagePresenter do

it_behaves_like 'pypi package presenter'
end

context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package1, file_name: "package_file_pending_destruction") }

let(:project_or_group) { project }

it { is_expected.not_to include(package_file_pending_destruction.file_name)}

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it { is_expected.to include(package_file_pending_destruction.file_name)}
end
end
end
@@ -73,6 +73,31 @@ RSpec.describe 'package details' do
end
end

context 'with package files pending destruction' do
let_it_be(:package_file) { create(:package_file, package: composer_package) }
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: composer_package) }

let(:package_file_ids) { graphql_data_at(:package, :package_files, :nodes).map { |node| node["id"] } }

it 'does not return them' do
subject

expect(package_file_ids).to contain_exactly(package_file.to_global_id.to_s)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
subject

expect(package_file_ids).to contain_exactly(package_file_pending_destruction.to_global_id.to_s, package_file.to_global_id.to_s)
end
end
end

context 'with a batched query' do
let_it_be(:conan_package) { create(:conan_package, project: project) }
@@ -76,6 +76,30 @@ RSpec.describe API::PackageFiles do
end
end
end

context 'with package files pending destruction' do
let!(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package) }

let(:package_file_ids) { json_response.map { |e| e['id'] } }

it 'does not return them' do
get api(url, user)

expect(package_file_ids).not_to include(package_file_pending_destruction.id)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
get api(url, user)

expect(package_file_ids).to include(package_file_pending_destruction.id)
end
end
end
end
end

@@ -149,6 +173,32 @@ RSpec.describe API::PackageFiles do
expect(response).to have_gitlab_http_status(:not_found)
end
end

context 'with package file pending destruction' do
let!(:package_file_id) { create(:package_file, :pending_destruction, package: package).id }

before do
project.add_maintainer(user)
end

it 'can not be accessed', :aggregate_failures do
expect { api_request }.not_to change { package.package_files.count }

expect(response).to have_gitlab_http_status(:not_found)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'can be accessed', :aggregate_failures do
expect { api_request }.to change { package.package_files.count }.by(-1)

expect(response).to have_gitlab_http_status(:no_content)
end
end
end
end
end
end
@@ -173,6 +173,34 @@ RSpec.describe API::RubygemPackages do
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end

context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, :xml, package: package, file_name: file_name) }

before do
project.update_column(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
end

it 'does not return them' do
subject

expect(response).to have_gitlab_http_status(:ok)
expect(response.body).not_to eq(package_file_pending_destruction.file.file.read)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
subject

expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(package_file_pending_destruction.file.file.read)
end
end
end
end

describe 'POST /api/v4/projects/:project_id/packages/rubygems/api/v1/gems/authorize' do
@@ -154,6 +154,7 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
end

describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/:module_version/file' do
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}/file?token=#{token}") }
let(:tokens) do
{
personal_access_token: ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = personal_access_token.id }.encoded,

@@ -202,7 +203,6 @@ RSpec.describe API::Terraform::Modules::V1::Packages do

with_them do
let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}/file?token=#{token}") }
let(:snowplow_gitlab_standard_context) { { project: project, user: user, namespace: project.namespace } }

before do

@@ -212,6 +212,41 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end

context 'with package file pending destruction' do
let_it_be(:package) { create(:package, package_type: :terraform_module, project: project, name: "module-555/pending-destruction", version: '1.0.0') }
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, :xml, package: package) }
let_it_be(:package_file) { create(:package_file, :terraform_module, package: package) }

let(:token) { tokens[:personal_access_token] }
let(:headers) { { 'Authorization' => "Bearer #{token}" } }

before do
project.add_maintainer(user)
end

it 'does not return them' do
subject

expect(response).to have_gitlab_http_status(:ok)
expect(response.body).not_to eq(package_file_pending_destruction.file.file.read)
expect(response.body).to eq(package_file.file.file.read)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
subject

expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(package_file_pending_destruction.file.file.read)
expect(response.body).not_to eq(package_file.file.file.read)
end
end
end
end

describe 'PUT /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version/file/authorize' do
@@ -265,4 +265,22 @@ RSpec.describe ::Packages::Maven::Metadata::SyncService do
end
end
end

# TODO When cleaning up packages_installable_package_files, consider adding a
# dummy package file pending for destruction on L10/11 and remove this context
context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: versionless_package_for_versions, file_name: Packages::Maven::Metadata.filename) }

subject { service.send(:metadata_package_file_for, versionless_package_for_versions) }

it { is_expected.not_to eq(package_file_pending_destruction) }

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it { is_expected.to eq(package_file_pending_destruction) }
end
end
end
@@ -198,7 +198,6 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
describe 'relationships' do
it { is_expected.to have_many(:publications).class_name('Packages::Debian::Publication').inverse_of(:distribution).with_foreign_key(:distribution_id) }
it { is_expected.to have_many(:packages).class_name('Packages::Package').through(:publications) }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile').through(:packages) }
end
end
else

@@ -229,6 +228,26 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
it 'returns only files from public packages with same codename' do
expect(subject.to_a).to contain_exactly(*public_package_with_same_codename.package_files)
end

context 'with pending destruction package files' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: public_package_with_same_codename) }

it 'does not return them' do
expect(subject.to_a).not_to include(package_file_pending_destruction)
end

context 'with packages_installable_package_files disabled' do
before do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
subject

expect(subject.to_a).to include(package_file_pending_destruction)
end
end
end
end
end
end
@@ -38,4 +38,28 @@ RSpec.shared_examples 'a package with files' do
'fileSha256' => first_file.file_sha256
)
end

context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package) }

let(:response_package_file_ids) { package_files_response.map { |pf| pf['id'] } }

it 'does not return them' do
expect(package.reload.package_files).to include(package_file_pending_destruction)

expect(response_package_file_ids).not_to include(package_file_pending_destruction.to_global_id.to_s)
end

context 'with packages_installable_package_files disabled' do
before(:context) do
stub_feature_flags(packages_installable_package_files: false)
end

it 'returns them' do
expect(package.reload.package_files).to include(package_file_pending_destruction)

expect(response_package_file_ids).to include(package_file_pending_destruction.to_global_id.to_s)
end
end
end
end
@@ -221,6 +221,7 @@ RSpec.shared_examples 'handling nuget search requests' do |anonymous_requests_ex
let_it_be(:packages_c) { create_list(:nuget_package, 5, name: 'Dummy.PackageC', project: project) }
let_it_be(:package_d) { create(:nuget_package, name: 'Dummy.PackageD', version: '5.0.5-alpha', project: project) }
let_it_be(:package_e) { create(:nuget_package, name: 'Foo.BarE', project: project) }

let(:search_term) { 'uMmy' }
let(:take) { 26 }
let(:skip) { 0 }
@@ -222,6 +222,37 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end

describe 'backup create fails' do
using RSpec::Parameterized::TableSyntax

file_backup_error = Backup::FileBackupError.new('/tmp', '/tmp/backup/uploads')
config = ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash
db_file_name = File.join(Gitlab.config.backup.path, 'db', 'database.sql.gz')
db_backup_error = Backup::DatabaseBackupError.new(config, db_file_name)

where(:backup_class, :rake_task, :error) do
Backup::Database | 'gitlab:backup:db:create' | db_backup_error
Backup::Builds | 'gitlab:backup:builds:create' | file_backup_error
Backup::Uploads | 'gitlab:backup:uploads:create' | file_backup_error
Backup::Artifacts | 'gitlab:backup:artifacts:create' | file_backup_error
Backup::Pages | 'gitlab:backup:pages:create' | file_backup_error
Backup::Lfs | 'gitlab:backup:lfs:create' | file_backup_error
Backup::Registry | 'gitlab:backup:registry:create' | file_backup_error
end

with_them do
before do
expect_next_instance_of(backup_class) do |instance|
expect(instance).to receive(:dump).and_raise(error)
end
end

it "raises an error with message" do
expect { run_rake_task(rake_task) }.to output(Regexp.new(error.message)).to_stdout_from_any_process
end
end
end

context 'tar creation' do
context 'archive file permissions' do
it 'sets correct permissions on the tar file' do