Add latest changes from gitlab-org/gitlab@master
Parent: 4441a8c8e4
Commit: c638142c8c
@ -331,7 +331,6 @@ Layout/LineLength:
|
|||
- 'app/models/integrations/asana.rb'
|
||||
- 'app/models/integrations/base_chat_notification.rb'
|
||||
- 'app/models/integrations/base_issue_tracker.rb'
|
||||
- 'app/models/integrations/bugzilla.rb'
|
||||
- 'app/models/integrations/chat_message/merge_message.rb'
|
||||
- 'app/models/integrations/chat_message/note_message.rb'
|
||||
- 'app/models/integrations/chat_message/pipeline_message.rb'
|
||||
|
|
|
|||
|
|
@ -4452,7 +4452,6 @@ RSpec/FeatureCategory:
|
|||
- 'spec/models/integrations/base_issue_tracker_spec.rb'
|
||||
- 'spec/models/integrations/base_slack_notification_spec.rb'
|
||||
- 'spec/models/integrations/base_third_party_wiki_spec.rb'
|
||||
- 'spec/models/integrations/bugzilla_spec.rb'
|
||||
- 'spec/models/integrations/buildkite_spec.rb'
|
||||
- 'spec/models/integrations/chat_message/alert_message_spec.rb'
|
||||
- 'spec/models/integrations/chat_message/base_message_spec.rb'
|
||||
|
|
|
|||
|
|
@ -77,7 +77,6 @@ Style/FormatString:
|
|||
- 'app/models/diff_note.rb'
|
||||
- 'app/models/diff_viewer/base.rb'
|
||||
- 'app/models/integrations/asana.rb'
|
||||
- 'app/models/integrations/bugzilla.rb'
|
||||
- 'app/models/integrations/chat_message/pipeline_message.rb'
|
||||
- 'app/models/integrations/confluence.rb'
|
||||
- 'app/models/integrations/custom_issue_tracker.rb'
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import axios from '../lib/utils/axios_utils';
|
|||
import { buildApiUrl } from './api_utils';
|
||||
|
||||
const PROJECTS_PATH = '/api/:version/projects.json';
|
||||
const PROJECT_PATH = '/api/:version/projects/:id';
|
||||
const PROJECT_MEMBERS_PATH = '/api/:version/projects/:id/members';
|
||||
const PROJECT_ALL_MEMBERS_PATH = '/api/:version/projects/:id/members/all';
|
||||
const PROJECT_IMPORT_MEMBERS_PATH = '/api/:version/projects/:id/import_project_members/:project_id';
|
||||
|
|
@ -43,6 +44,12 @@ export function createProject(projectData) {
|
|||
});
|
||||
}
|
||||
|
||||
export function deleteProject(projectId) {
|
||||
const url = buildApiUrl(PROJECT_PATH).replace(':id', projectId);
|
||||
|
||||
return axios.delete(url);
|
||||
}
|
||||
|
||||
export function importProjectMembers(sourceId, targetId) {
|
||||
const url = buildApiUrl(PROJECT_IMPORT_MEMBERS_PATH)
|
||||
.replace(':id', sourceId)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
<script>
|
||||
import { GlBadge, GlTruncate } from '@gitlab/ui';
|
||||
import { stringify } from 'yaml';
|
||||
import { s__ } from '~/locale';
|
||||
import { WORKLOAD_STATUS_BADGE_VARIANTS } from '../constants';
|
||||
import WorkloadDetailsItem from './workload_details_item.vue';
|
||||
|
|
@ -26,6 +27,18 @@ export default {
|
|||
const { annotations } = this.item;
|
||||
return Object.entries(annotations).map(this.getAnnotationsText);
|
||||
},
|
||||
specYaml() {
|
||||
return this.getYamlStringFromJSON(this.item.spec);
|
||||
},
|
||||
statusYaml() {
|
||||
return this.getYamlStringFromJSON(this.item.fullStatus);
|
||||
},
|
||||
annotationsYaml() {
|
||||
return this.getYamlStringFromJSON(this.item.annotations);
|
||||
},
|
||||
hasFullStatus() {
|
||||
return Boolean(this.item.fullStatus);
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
getLabelBadgeText([key, value]) {
|
||||
|
|
@ -35,6 +48,12 @@ export default {
|
|||
getAnnotationsText([key, value]) {
|
||||
return `${key}: ${value}`;
|
||||
},
|
||||
getYamlStringFromJSON(json) {
|
||||
if (!json) {
|
||||
return '';
|
||||
}
|
||||
return stringify(json);
|
||||
},
|
||||
},
|
||||
i18n: {
|
||||
name: s__('KubernetesDashboard|Name'),
|
||||
|
|
@ -42,6 +61,7 @@ export default {
|
|||
labels: s__('KubernetesDashboard|Labels'),
|
||||
status: s__('KubernetesDashboard|Status'),
|
||||
annotations: s__('KubernetesDashboard|Annotations'),
|
||||
spec: s__('KubernetesDashboard|Spec'),
|
||||
},
|
||||
WORKLOAD_STATUS_BADGE_VARIANTS,
|
||||
};
|
||||
|
|
@ -62,19 +82,29 @@ export default {
|
|||
</gl-badge>
|
||||
</div>
|
||||
</workload-details-item>
|
||||
<workload-details-item v-if="item.status" :label="$options.i18n.status">
|
||||
<gl-badge :variant="$options.WORKLOAD_STATUS_BADGE_VARIANTS[item.status]">{{
|
||||
<workload-details-item v-if="item.status && !item.fullStatus" :label="$options.i18n.status">
|
||||
<gl-badge :variant="$options.WORKLOAD_STATUS_BADGE_VARIANTS[item.status]" size="sm">{{
|
||||
item.status
|
||||
}}</gl-badge></workload-details-item
|
||||
}}</gl-badge>
|
||||
</workload-details-item>
|
||||
<workload-details-item v-if="item.fullStatus" :label="$options.i18n.status" collapsible>
|
||||
<template v-if="item.status" #label>
|
||||
<span class="gl-mr-2 gl-font-weight-bold">{{ $options.i18n.status }}</span>
|
||||
<gl-badge :variant="$options.WORKLOAD_STATUS_BADGE_VARIANTS[item.status]" size="sm">{{
|
||||
item.status
|
||||
}}</gl-badge>
|
||||
</template>
|
||||
<pre>{{ statusYaml }}</pre>
|
||||
</workload-details-item>
|
||||
<workload-details-item
|
||||
v-if="itemAnnotations.length"
|
||||
:label="$options.i18n.annotations"
|
||||
collapsible
|
||||
>
|
||||
<workload-details-item v-if="itemAnnotations.length" :label="$options.i18n.annotations">
|
||||
<p
|
||||
v-for="annotation of itemAnnotations"
|
||||
:key="annotation"
|
||||
class="gl-mb-2 gl-overflow-wrap-anywhere"
|
||||
>
|
||||
{{ annotation }}
|
||||
</p>
|
||||
<pre>{{ annotationsYaml }}</pre>
|
||||
</workload-details-item>
|
||||
<workload-details-item v-if="item.spec" :label="$options.i18n.spec" collapsible>
|
||||
<pre>{{ specYaml }}</pre>
|
||||
</workload-details-item>
|
||||
</ul>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -1,18 +1,77 @@
|
|||
<script>
|
||||
import { GlCollapse, GlButton } from '@gitlab/ui';
|
||||
import { __ } from '~/locale';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlCollapse,
|
||||
GlButton,
|
||||
},
|
||||
props: {
|
||||
label: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
collapsible: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
isVisible: false,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
chevronIcon() {
|
||||
return this.isVisible ? 'chevron-down' : 'chevron-right';
|
||||
},
|
||||
collapsibleLabel() {
|
||||
return this.isVisible ? this.$options.i18n.collapse : this.$options.i18n.expand;
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
toggleCollapse() {
|
||||
this.isVisible = !this.isVisible;
|
||||
},
|
||||
},
|
||||
i18n: {
|
||||
collapse: __('Collapse'),
|
||||
expand: __('Expand'),
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<li class="gl-line-height-20 gl-py-3 gl-border-b-solid gl-border-b-2 gl-border-b-gray-100">
|
||||
<label class="gl-font-weight-bold gl-mb-2"> {{ label }} </label>
|
||||
<div class="gl-text-gray-500 gl-mb-0">
|
||||
<div
|
||||
:class="{
|
||||
'gl-display-flex gl-flex-wrap gl-justify-content-space-between gl-align-items-center': collapsible,
|
||||
}"
|
||||
>
|
||||
<slot name="label">
|
||||
<label class="gl-font-weight-bold gl-mb-0"> {{ label }} </label>
|
||||
</slot>
|
||||
|
||||
<gl-button
|
||||
v-if="collapsible"
|
||||
:icon="chevronIcon"
|
||||
:aria-label="collapsibleLabel"
|
||||
category="tertiary"
|
||||
size="small"
|
||||
class="gl-ml-auto"
|
||||
@click="toggleCollapse"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<gl-collapse v-if="collapsible" :visible="isVisible">
|
||||
<div v-if="isVisible" class="gl-mt-4">
|
||||
<slot></slot>
|
||||
</div>
|
||||
</gl-collapse>
|
||||
|
||||
<div v-else class="gl-text-gray-500 gl-mb-0 gl-mt-2">
|
||||
<slot></slot>
|
||||
</div>
|
||||
</li>
|
||||
|
|
|
|||
|
|
@ -29,6 +29,7 @@ export const apolloProvider = () => {
|
|||
query: k8sPodsQuery,
|
||||
data: {
|
||||
metadata,
|
||||
spec: {},
|
||||
status: {
|
||||
phase: null,
|
||||
},
|
||||
|
|
|
|||
|
|
@ -20,9 +20,9 @@ export const mapWorkloadItem = (item) => {
|
|||
annotations: item.metadata?.annotations || {},
|
||||
labels: item.metadata?.labels || {},
|
||||
};
|
||||
return { status: item.status, metadata };
|
||||
return { status: item.status, spec: item.spec, metadata };
|
||||
}
|
||||
return { status: item.status };
|
||||
return { status: item.status, spec: item.spec };
|
||||
};
|
||||
|
||||
export const mapSetItem = (item) => {
|
||||
|
|
|
|||
|
|
@ -7,8 +7,7 @@ query getK8sDashboardPods($configuration: LocalConfiguration) {
|
|||
labels
|
||||
annotations
|
||||
}
|
||||
status {
|
||||
phase
|
||||
}
|
||||
status
|
||||
spec
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -35,6 +35,8 @@ export default {
|
|||
labels: pod.metadata?.labels,
|
||||
annotations: pod.metadata?.annotations,
|
||||
kind: s__('KubernetesDashboard|Pod'),
|
||||
spec: pod.spec,
|
||||
fullStatus: pod.status,
|
||||
};
|
||||
}) || []
|
||||
);
|
||||
|
|
|
|||
|
|
@ -63,6 +63,9 @@ export const organizationProjects = [
|
|||
forkingAccessLevel: {
|
||||
stringValue: 'ENABLED',
|
||||
},
|
||||
userPermissions: {
|
||||
removeProject: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'gid://gitlab/Project/7',
|
||||
|
|
@ -86,6 +89,9 @@ export const organizationProjects = [
|
|||
forkingAccessLevel: {
|
||||
stringValue: 'ENABLED',
|
||||
},
|
||||
userPermissions: {
|
||||
removeProject: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'gid://gitlab/Project/6',
|
||||
|
|
@ -109,6 +115,9 @@ export const organizationProjects = [
|
|||
forkingAccessLevel: {
|
||||
stringValue: 'ENABLED',
|
||||
},
|
||||
userPermissions: {
|
||||
removeProject: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'gid://gitlab/Project/5',
|
||||
|
|
@ -132,6 +141,9 @@ export const organizationProjects = [
|
|||
forkingAccessLevel: {
|
||||
stringValue: 'ENABLED',
|
||||
},
|
||||
userPermissions: {
|
||||
removeProject: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'gid://gitlab/Project/1',
|
||||
|
|
@ -155,6 +167,9 @@ export const organizationProjects = [
|
|||
forkingAccessLevel: {
|
||||
stringValue: 'ENABLED',
|
||||
},
|
||||
userPermissions: {
|
||||
removeProject: false,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,9 @@
|
|||
import { GlLoadingIcon, GlEmptyState, GlKeysetPagination } from '@gitlab/ui';
|
||||
import { s__, __ } from '~/locale';
|
||||
import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
|
||||
import { ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
|
||||
import { DEFAULT_PER_PAGE } from '~/api';
|
||||
import { deleteProject } from '~/rest_api';
|
||||
import { createAlert } from '~/alert';
|
||||
import { SORT_ITEM_NAME, SORT_DIRECTION_ASC } from '../constants';
|
||||
import projectsQuery from '../graphql/queries/projects.query.graphql';
|
||||
|
|
@ -14,6 +16,9 @@ export default {
|
|||
errorMessage: s__(
|
||||
'Organization|An error occurred loading the projects. Please refresh the page to try again.',
|
||||
),
|
||||
deleteErrorMessage: s__(
|
||||
'Organization|An error occurred deleting the project. Please refresh the page to try again.',
|
||||
),
|
||||
emptyState: {
|
||||
title: s__("Organization|You don't have any projects yet."),
|
||||
description: s__(
|
||||
|
|
@ -163,6 +168,20 @@ export default {
|
|||
startCursor,
|
||||
});
|
||||
},
|
||||
setProjectIsDeleting(project, val) {
|
||||
this.$set(project.actionLoadingStates, ACTION_DELETE, val);
|
||||
},
|
||||
async deleteProject(data) {
|
||||
try {
|
||||
this.setProjectIsDeleting(data, true);
|
||||
await deleteProject(data.id);
|
||||
} catch (error) {
|
||||
createAlert({ message: this.$options.i18n.deleteErrorMessage, error, captureError: true });
|
||||
} finally {
|
||||
this.setProjectIsDeleting(data, false);
|
||||
this.$apollo.queries.projects.refetch();
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
|
@ -170,7 +189,12 @@ export default {
|
|||
<template>
|
||||
<gl-loading-icon v-if="isLoading" class="gl-mt-5" size="md" />
|
||||
<div v-else-if="nodes.length">
|
||||
<projects-list :projects="nodes" show-project-icon :list-item-class="listItemClass" />
|
||||
<projects-list
|
||||
:projects="nodes"
|
||||
show-project-icon
|
||||
:list-item-class="listItemClass"
|
||||
@delete="deleteProject"
|
||||
/>
|
||||
<div v-if="pageInfo.hasNextPage || pageInfo.hasPreviousPage" class="gl-text-center gl-mt-5">
|
||||
<gl-keyset-pagination
|
||||
v-bind="pageInfo"
|
||||
|
|
|
|||
|
|
@ -31,6 +31,9 @@ query getOrganizationProjects(
|
|||
forkingAccessLevel {
|
||||
stringValue
|
||||
}
|
||||
userPermissions {
|
||||
removeProject
|
||||
}
|
||||
}
|
||||
pageInfo {
|
||||
...PageInfo
|
||||
|
|
|
|||
|
|
@ -2,6 +2,16 @@ import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
|||
import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
|
||||
import { QUERY_PARAM_END_CURSOR, QUERY_PARAM_START_CURSOR } from './constants';
|
||||
|
||||
const availableProjectActions = (userPermissions) => {
|
||||
const baseActions = [ACTION_EDIT];
|
||||
|
||||
if (userPermissions.removeProject) {
|
||||
return [...baseActions, ACTION_DELETE];
|
||||
}
|
||||
|
||||
return baseActions;
|
||||
};
|
||||
|
||||
export const formatProjects = (projects) =>
|
||||
projects.map(
|
||||
({
|
||||
|
|
@ -11,6 +21,7 @@ export const formatProjects = (projects) =>
|
|||
issuesAccessLevel,
|
||||
forkingAccessLevel,
|
||||
webUrl,
|
||||
userPermissions,
|
||||
...project
|
||||
}) => ({
|
||||
...project,
|
||||
|
|
@ -22,7 +33,10 @@ export const formatProjects = (projects) =>
|
|||
webUrl,
|
||||
isForked: false,
|
||||
editPath: `${webUrl}/edit`,
|
||||
availableActions: [ACTION_EDIT, ACTION_DELETE],
|
||||
availableActions: availableProjectActions(userPermissions),
|
||||
actionLoadingStates: {
|
||||
[ACTION_DELETE]: false,
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
|
|
|
|||
|
|
@ -36,6 +36,11 @@ export default {
|
|||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
confirmLoading: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
issuesCount: {
|
||||
type: [Number, String],
|
||||
required: false,
|
||||
|
|
@ -74,6 +79,7 @@ export default {
|
|||
attributes: {
|
||||
variant: 'danger',
|
||||
disabled: this.confirmDisabled,
|
||||
loading: this.confirmLoading,
|
||||
'data-testid': 'confirm-delete-button',
|
||||
},
|
||||
},
|
||||
|
|
@ -83,6 +89,15 @@ export default {
|
|||
};
|
||||
},
|
||||
},
|
||||
watch: {
|
||||
confirmLoading(isLoading, wasLoading) {
|
||||
// If the button was loading and now no longer is
|
||||
if (!isLoading && wasLoading) {
|
||||
// Hide the modal
|
||||
this.$emit('change', false);
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
||||
|
|
@ -94,7 +109,7 @@ export default {
|
|||
title-class="gl-text-red-500"
|
||||
:action-primary="modalActionProps.primary"
|
||||
:action-cancel="modalActionProps.cancel"
|
||||
@primary="$emit('primary', $event)"
|
||||
@primary.prevent="$emit('primary')"
|
||||
@change="$emit('change', $event)"
|
||||
>
|
||||
<template #modal-title>{{ $options.i18n.title }}</template>
|
||||
|
|
|
|||
|
|
@ -20,6 +20,9 @@ export default {
|
|||
computed: {
|
||||
...mapState(['artifacts', 'isLoading', 'hasError']),
|
||||
...mapGetters(['title']),
|
||||
hasArtifacts() {
|
||||
return this.artifacts.length > 0;
|
||||
},
|
||||
},
|
||||
created() {
|
||||
this.setEndpoint(this.endpoint);
|
||||
|
|
@ -31,7 +34,12 @@ export default {
|
|||
};
|
||||
</script>
|
||||
<template>
|
||||
<mr-collapsible-extension :title="title" :is-loading="isLoading" :has-error="hasError">
|
||||
<mr-collapsible-extension
|
||||
v-if="isLoading || hasArtifacts || hasError"
|
||||
:title="title"
|
||||
:is-loading="isLoading"
|
||||
:has-error="hasError"
|
||||
>
|
||||
<artifacts-list :artifacts="artifacts" />
|
||||
</mr-collapsible-extension>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -198,6 +198,9 @@ export default {
|
|||
hasActionDelete() {
|
||||
return this.project.availableActions?.includes(ACTION_DELETE);
|
||||
},
|
||||
isActionDeleteLoading() {
|
||||
return this.project.actionLoadingStates[ACTION_DELETE];
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
topicPath(topic) {
|
||||
|
|
@ -390,6 +393,7 @@ export default {
|
|||
v-model="isDeleteModalVisible"
|
||||
:confirm-phrase="project.name"
|
||||
:is-fork="project.isForked"
|
||||
:confirm-loading="isActionDeleteLoading"
|
||||
:merge-requests-count="openMergeRequestsCount"
|
||||
:issues-count="openIssuesCount"
|
||||
:forks-count="forksCount"
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
|
|||
ignore_columns %i[static_objects_external_storage_auth_token], remove_with: '14.9', remove_after: '2022-03-22'
|
||||
ignore_column :web_ide_clientside_preview_enabled, remove_with: '15.11', remove_after: '2023-04-22'
|
||||
ignore_columns %i[instance_administration_project_id instance_administrators_group_id], remove_with: '16.2', remove_after: '2023-06-22'
|
||||
ignore_columns %i[encrypted_ai_access_token encrypted_ai_access_token_iv], remove_with: '16.10', remove_after: '2024-03-22'
|
||||
ignore_columns %i[repository_storages], remove_with: '16.8', remove_after: '2023-12-21'
|
||||
ignore_columns %i[delayed_project_removal lock_delayed_project_removal delayed_group_deletion], remove_with: '16.10', remove_after: '2024-03-22'
|
||||
|
||||
|
|
|
|||
|
|
@ -21,8 +21,9 @@ module Ci
|
|||
when 'latest_released_at_desc' then relation.order_by_latest_released_at_desc
|
||||
when 'latest_released_at_asc' then relation.order_by_latest_released_at_asc
|
||||
when 'created_at_asc' then relation.order_by_created_at_asc
|
||||
when 'created_at_desc' then relation.order_by_created_at_desc
|
||||
else
|
||||
relation.order_by_created_at_desc
|
||||
relation.order_by_star_count(:desc)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ module Ci
|
|||
class Resource < ::ApplicationRecord
|
||||
include PgFullTextSearchable
|
||||
include Gitlab::VisibilityLevel
|
||||
include Sortable
|
||||
|
||||
self.table_name = 'catalog_resources'
|
||||
|
||||
|
|
@ -35,6 +36,15 @@ module Ci
|
|||
scope :order_by_name_asc, -> { reorder(arel_table[:name].asc.nulls_last) }
|
||||
scope :order_by_latest_released_at_desc, -> { reorder(arel_table[:latest_released_at].desc.nulls_last) }
|
||||
scope :order_by_latest_released_at_asc, -> { reorder(arel_table[:latest_released_at].asc.nulls_last) }
|
||||
scope :order_by_star_count, ->(direction) do
|
||||
build_keyset_order_on_joined_column(
|
||||
scope: joins(:project),
|
||||
attribute_name: 'project_star_count',
|
||||
column: Project.arel_table[:star_count],
|
||||
direction: direction,
|
||||
nullable: :nulls_last
|
||||
)
|
||||
end
|
||||
|
||||
delegate :avatar_path, :star_count, :full_path, to: :project
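# Illustrative sketch, not part of this commit: the new scope keyset-orders catalog
# resources by the star count of the joined project, and the sort fallback earlier in
# this change now defaults to it. The Ci::Catalog::Resource constant is assumed from context.
Ci::Catalog::Resource.order_by_star_count(:desc).limit(3).map(&:star_count)
# => [42, 17, 0]  (illustrative values; star_count is delegated to the project)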
|
||||
|
||||
|
|
|
|||
|
|
@ -7,6 +7,10 @@ module Ci
|
|||
# Only versions which contain valid CI components are included in this table.
|
||||
class Version < ::ApplicationRecord
|
||||
include BulkInsertableAssociations
|
||||
include SemanticVersionable
|
||||
|
||||
semver_method :version
|
||||
validate_semver
|
||||
|
||||
self.table_name = 'catalog_resource_versions'
|
||||
|
||||
|
|
@ -33,8 +37,6 @@ module Ci
|
|||
after_save :update_catalog_resource
|
||||
|
||||
class << self
|
||||
# In the future, we should support semantic versioning.
|
||||
# See https://gitlab.com/gitlab-org/gitlab/-/issues/427286
|
||||
def latest
|
||||
order_by_released_at_desc.first
|
||||
end
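# Illustrative sketch, not part of this commit: assuming the SemanticVersionable concern
# parses the attribute declared with `semver_method` into the semver_* columns added by
# the migration in this change, a catalog version string is split on assignment.
version = Ci::Catalog::Resources::Version.new(version: '1.2.3-beta.1')
version.semver_major       # => 1 (assumed concern behaviour)
version.semver_minor       # => 2
version.semver_patch       # => 3
version.semver_prerelease  # => "beta.1"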
|
||||
|
|
|
|||
|
|
@ -9,6 +9,14 @@ module Ci
|
|||
include Limitable
|
||||
include EachBatch
|
||||
include BatchNullifyDependentAssociations
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
VALID_REF_REGEX = %r{\A(#{Gitlab::Git::TAG_REF_PREFIX}|#{Gitlab::Git::BRANCH_REF_PREFIX}).+}
|
||||
|
||||
# The only way that ref can be unexpanded after #expand_short_ref runs is if the ref
|
||||
# is ambiguous because both a branch and a tag with the same name exist, or it is
|
||||
# ambiguous because neither exists.
|
||||
INVALID_REF_MESSAGE = 'is ambiguous'
|
||||
|
||||
self.limit_name = 'ci_pipeline_schedules'
|
||||
self.limit_scope = :project
|
||||
|
|
@ -21,7 +29,8 @@ module Ci
|
|||
|
||||
validates :cron, unless: :importing?, cron: true, presence: { unless: :importing? }
|
||||
validates :cron_timezone, cron_timezone: true, presence: { unless: :importing? }
|
||||
validates :ref, presence: { unless: :importing? }
|
||||
validates :ref, presence: { unless: :importing? },
|
||||
format: { with: VALID_REF_REGEX, allow_nil: true, message: INVALID_REF_MESSAGE, unless: :importing? }
|
||||
validates :description, presence: true
|
||||
validates :variables, nested_attributes_duplicates: true
|
||||
|
||||
|
|
@ -33,6 +42,8 @@ module Ci
|
|||
scope :owned_by, ->(user) { where(owner: user) }
|
||||
scope :for_project, ->(project_id) { where(project_id: project_id) }
|
||||
|
||||
before_validation :expand_short_ref
|
||||
|
||||
accepts_nested_attributes_for :variables, allow_destroy: true
|
||||
|
||||
alias_attribute :real_next_run, :next_run_at
|
||||
|
|
@ -91,6 +102,21 @@ module Ci
|
|||
|
||||
super
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def expand_short_ref
|
||||
return if ref.blank? || VALID_REF_REGEX.match?(ref) || ambiguous_ref?
|
||||
|
||||
# In case the ref doesn't exist default to the initial value
|
||||
self.ref = project.repository.expand_ref(ref) || ref
|
||||
end
|
||||
|
||||
def ambiguous_ref?
|
||||
strong_memoize_with(:ambiguous_ref, ref) do
|
||||
project.repository.ambiguous_ref?(ref)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
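# Illustrative sketch, not part of this commit: the before_validation hook expands a short
# ref to its full name, and a ref that matches both a branch and a tag now fails validation.
# Assumes a project with a "main" branch, plus a branch and a tag both named "v1.0".
schedule = Ci::PipelineSchedule.new(project: project, ref: 'main')
schedule.valid?
schedule.ref            # => "refs/heads/main" (expanded by expand_short_ref)

ambiguous = Ci::PipelineSchedule.new(project: project, ref: 'v1.0')
ambiguous.valid?        # => false
ambiguous.errors[:ref]  # => ["is ambiguous"]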
|
||||
|
||||
|
|
|
|||
|
|
@ -6,16 +6,20 @@ module Ci
|
|||
include Ci::HasVariable
|
||||
include Ci::RawVariable
|
||||
|
||||
ROUTING_FEATURE_FLAG = :ci_partitioning_use_ci_pipeline_variables_routing_table
|
||||
|
||||
belongs_to :pipeline,
|
||||
->(pipeline_variable) { in_partition(pipeline_variable) },
|
||||
partition_foreign_key: :partition_id,
|
||||
inverse_of: :variables
|
||||
|
||||
self.primary_key = :id
|
||||
self.table_name = :p_ci_pipeline_variables
|
||||
self.sequence_name = :ci_pipeline_variables_id_seq
|
||||
|
||||
partitionable scope: :pipeline, partitioned: true
|
||||
partitionable scope: :pipeline, through: {
|
||||
table: :p_ci_pipeline_variables,
|
||||
flag: ROUTING_FEATURE_FLAG
|
||||
}
|
||||
|
||||
alias_attribute :secret_value, :value
|
||||
|
||||
|
|
|
|||
|
|
@ -16,12 +16,22 @@ module Integrations
|
|||
end
|
||||
|
||||
def self.help
|
||||
docs_link = ActionController::Base.helpers.link_to _('Learn more.'), Rails.application.routes.url_helpers.help_page_url('user/project/integrations/bugzilla'), target: '_blank', rel: 'noopener noreferrer'
|
||||
s_("IssueTracker|Use Bugzilla as this project's issue tracker. %{docs_link}").html_safe % { docs_link: docs_link.html_safe }
|
||||
docs_link = ActionController::Base.helpers.link_to(_('Learn more.'),
|
||||
Rails.application.routes.url_helpers.help_page_url('user/project/integrations/bugzilla'),
|
||||
target: '_blank',
|
||||
rel: 'noopener noreferrer')
|
||||
|
||||
help = format(s_("IssueTracker|Use Bugzilla as this project's issue tracker. %{docs_link}").html_safe,
|
||||
docs_link: docs_link.html_safe)
|
||||
help << "<br><br><i>#{attribution_notice}</i>".html_safe
|
||||
end
|
||||
|
||||
def self.to_param
|
||||
'bugzilla'
|
||||
end
|
||||
|
||||
def self.attribution_notice
|
||||
_('The Bugzilla logo is a trademark of the Mozilla Foundation in the U.S. and other countries.')
|
||||
end
|
||||
end
|
||||
end
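# Illustrative sketch, not part of this commit: the help text now ends with the Bugzilla
# trademark attribution appended in italics.
Integrations::Bugzilla.help.include?(Integrations::Bugzilla.attribution_notice) # => true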
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ module Ml
|
|||
include SemanticVersionable
|
||||
|
||||
semver_method :semver
|
||||
validate_semver
|
||||
|
||||
validates :project, :model, presence: true
|
||||
|
||||
|
|
@ -63,6 +64,11 @@ module Ml
|
|||
end
|
||||
end
|
||||
|
||||
def version=(value)
|
||||
self.semver = value
|
||||
super(value)
|
||||
end
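# Illustrative sketch, not part of this commit: assigning a version string now also records
# it as the model version's semver (class constant assumed to be Ml::ModelVersion, and the
# SemanticVersionable behaviour behind #semver= is assumed).
model_version = Ml::ModelVersion.new
model_version.version = '2.0.1'  # also runs self.semver = '2.0.1'
model_version.version            # => "2.0.1"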
|
||||
|
||||
private
|
||||
|
||||
def valid_model?
|
||||
|
|
|
|||
|
|
@ -42,6 +42,7 @@ class PersonalAccessToken < ApplicationRecord
|
|||
scope :owner_is_human, -> { includes(:user).references(:user).merge(User.human) }
|
||||
scope :last_used_before, -> (date) { where("last_used_at <= ?", date) }
|
||||
scope :last_used_after, -> (date) { where("last_used_at >= ?", date) }
|
||||
scope :expiring_and_not_notified_without_impersonation, -> { where(["(revoked = false AND expire_notification_delivered = false AND expires_at >= CURRENT_DATE AND expires_at <= :date) and impersonation = false", { date: DAYS_TO_EXPIRE.days.from_now.to_date }]) }
|
||||
|
||||
validates :scopes, presence: true
|
||||
validates :expires_at, presence: true, on: :create, unless: :allow_expires_at_to_be_empty?
|
||||
|
|
|
|||
|
|
@ -1,6 +1,9 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class SentNotification < ApplicationRecord
|
||||
include IgnorableColumns
|
||||
|
||||
ignore_column %i[id_convert_to_bigint], remove_with: '17.0', remove_after: '2024-04-19'
|
||||
belongs_to :project
|
||||
belongs_to :noteable, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
|
||||
belongs_to :recipient, class_name: "User"
|
||||
|
|
|
|||
|
|
@ -168,6 +168,8 @@ class User < MainClusterwide::ApplicationRecord
|
|||
|
||||
has_many :emails
|
||||
has_many :personal_access_tokens, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
|
||||
has_many :expiring_soon_and_unnotified_personal_access_tokens, -> { expiring_and_not_notified_without_impersonation }, class_name: 'PersonalAccessToken'
|
||||
|
||||
has_many :identities, dependent: :destroy, autosave: true # rubocop:disable Cop/ActiveRecordDependent
|
||||
has_many :webauthn_registrations
|
||||
has_many :chat_names, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
|
||||
|
|
@ -615,6 +617,12 @@ class User < MainClusterwide::ApplicationRecord
|
|||
.where('keys.user_id = users.id')
|
||||
.expiring_soon_and_not_notified)
|
||||
end
|
||||
|
||||
scope :with_personal_access_tokens_expiring_soon_and_ids, ->(ids) do
|
||||
where(id: ids)
|
||||
.includes(:expiring_soon_and_unnotified_personal_access_tokens)
|
||||
end
|
||||
|
||||
scope :order_recent_sign_in, -> { reorder(arel_table[:current_sign_in_at].desc.nulls_last) }
|
||||
scope :order_oldest_sign_in, -> { reorder(arel_table[:current_sign_in_at].asc.nulls_last) }
|
||||
scope :order_recent_last_activity, -> { reorder(arel_table[:last_activity_on].desc.nulls_last, arel_table[:id].asc) }
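# Illustrative sketch, not part of this commit: the new scope preloads each user's
# soon-to-expire, unnotified tokens so the worker updated later in this change can
# iterate over them without extra queries (user IDs are illustrative).
users = User.with_personal_access_tokens_expiring_soon_and_ids([1, 2, 3])
users.each do |user|
  user.expiring_soon_and_unnotified_personal_access_tokens # already loaded via includes
end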
|
||||
|
|
|
|||
|
|
@ -35,7 +35,8 @@ module Ci
|
|||
@version = Ci::Catalog::Resources::Version.new(
|
||||
release: release,
|
||||
catalog_resource: project.catalog_resource,
|
||||
project: project
|
||||
project: project,
|
||||
version: release.tag
|
||||
)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@
|
|||
%p
|
||||
= s_('SlackIntegration|You must do this step only once.')
|
||||
%p
|
||||
= render Pajamas::ButtonComponent.new(href: slack_app_manifest_share_admin_application_settings_path) do
|
||||
= render Pajamas::ButtonComponent.new(href: slack_app_manifest_share_admin_application_settings_path, target: '_blank', button_options: { rel: 'noopener noreferrer' }) do
|
||||
= s_("SlackIntegration|Create Slack app")
|
||||
%hr
|
||||
%h5
|
||||
|
|
@ -63,4 +63,3 @@
|
|||
%p
|
||||
= render Pajamas::ButtonComponent.new(href: slack_app_manifest_download_admin_application_settings_path, icon: 'download') do
|
||||
= s_("SlackIntegration|Download latest manifest file")
|
||||
|
||||
|
|
|
|||
|
|
@ -18,21 +18,18 @@ module PersonalAccessTokens
|
|||
BATCH_SIZE = 100
|
||||
|
||||
def perform(*args)
|
||||
limit_date = PersonalAccessToken::DAYS_TO_EXPIRE.days.from_now.to_date
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord -- We need to specify batch size to avoid the worker timing out
|
||||
loop do
|
||||
tokens = PersonalAccessToken.without_impersonation.expiring_and_not_notified(limit_date)
|
||||
tokens = PersonalAccessToken.expiring_and_not_notified_without_impersonation
|
||||
.select(:user_id).limit(BATCH_SIZE).to_a
|
||||
|
||||
break if tokens.empty?
|
||||
|
||||
users = User.where(id: tokens.pluck(:user_id).uniq)
|
||||
users = User.with_personal_access_tokens_expiring_soon_and_ids(tokens.pluck(:user_id).uniq)
|
||||
|
||||
users.each do |user|
|
||||
with_context(user: user) do
|
||||
expiring_user_tokens = user.personal_access_tokens
|
||||
.without_impersonation.expiring_and_not_notified(limit_date)
|
||||
expiring_user_tokens = user.expiring_soon_and_unnotified_personal_access_tokens
|
||||
|
||||
next if expiring_user_tokens.empty?
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,9 @@
|
|||
---
|
||||
name: ci_partitioning_use_ci_pipeline_variables_routing_table
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/439069
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/143334
|
||||
rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/production/-/issues/17508
|
||||
milestone: '16.10'
|
||||
group: group::pipeline execution
|
||||
type: gitlab_com_derisk
|
||||
default_enabled: false
|
||||
|
|
@ -9,7 +9,6 @@ Gitlab::Database::Partitioning.register_models(
|
|||
Ci::RunnerManagerBuild,
|
||||
Ci::JobAnnotation,
|
||||
Ci::BuildMetadata,
|
||||
Ci::PipelineVariable,
|
||||
CommitStatus,
|
||||
BatchedGitRefUpdates::Deletion,
|
||||
Users::ProjectVisit,
|
||||
|
|
|
|||
|
|
@ -0,0 +1,138 @@
|
|||
- name: GitLab Duo Chat Beta now available in Premium
|
||||
description: |
|
||||
In 16.8, we made GitLab Duo Chat available for self-managed instances. In 16.9, we are making Chat available to Premium customers while it is still in Beta.
|
||||
|
||||
GitLab Duo Chat can:
|
||||
|
||||
- Explain or summarize issues, epics, and code.
|
||||
- Answer specific questions about these artifacts like "Collect all the arguments raised in comments regarding the solution proposed in this issue."
|
||||
- Generate code or content based on the information in these artifacts. For example, "Can you write documentation for this code?"
|
||||
- Help you start a process. For example, "Create a .gitlab-ci.yml configuration file for testing and building a Ruby on Rails application in a GitLab CI/CD pipeline."
|
||||
- Answer all your DevSecOps-related questions, whether you are a beginner or an expert. For example, "How can I set up Dynamic Application Security Testing for a REST API?"
|
||||
- Answer follow-up questions so you can iteratively work through all the previous scenarios.
|
||||
|
||||
GitLab Duo Chat is available as a [Beta](https://docs.gitlab.com/ee/policy/experiment-beta-support.html#beta) feature. It is also integrated into our Web IDE and GitLab Workflow extension for VS Code as [Experimental](https://docs.gitlab.com/ee/policy/experiment-beta-support.html#experiment) features. In these IDEs, you can also use [predefined chat commands that help you do standard tasks more quickly](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html#explain-code-in-the-ide) like writing tests.
|
||||
stage: ai-powered
|
||||
self-managed: true
|
||||
gitlab-com: true
|
||||
available_in: [Premium, Ultimate]
|
||||
documentation_link: https://docs.gitlab.com/ee/user/gitlab_duo_chat.html
|
||||
image_url: https://about.gitlab.com/images/16_9/gitlab_duo_chat_beta_now_available_in_premium.png
|
||||
published_at: 2024-02-15
|
||||
release: 16.9
|
||||
|
||||
- name: Improvements to the CI/CD variables user interface
|
||||
description: |
|
||||
In GitLab 16.9, we have released a series of improvements to the CI/CD variables user experience. We have improved the variables creation flow through changes including:
|
||||
|
||||
- [Improved validation when variable values do not meet the requirements](https://gitlab.com/gitlab-org/gitlab/-/issues/365934).
|
||||
- [Help text during variable creation](https://gitlab.com/gitlab-org/gitlab/-/issues/410220).
|
||||
- [Allow resizing of the value field in the variables form](https://gitlab.com/gitlab-org/gitlab/-/issues/434667).
|
||||
|
||||
Other improvements include a new, [optional description field for group and project variables](https://gitlab.com/gitlab-org/gitlab/-/issues/378938) to assist with the management of variables. We have also made it easier to [add or edit multiple variables](https://gitlab.com/gitlab-org/gitlab/-/issues/434666), lowering the friction in the software development workflow and enabling developers to perform their job more efficiently.
|
||||
|
||||
Your [feedback for these changes](https://gitlab.com/gitlab-org/gitlab/-/issues/441177) is always valued and appreciated.
|
||||
stage: verify
|
||||
self-managed: true
|
||||
gitlab-com: true
|
||||
available_in: [Free, Premium, Ultimate]
|
||||
documentation_link: https://docs.gitlab.com/ee/ci/variables/
|
||||
image_url: https://img.youtube.com/vi/gdL2cEp3kw0/hqdefault.jpg
|
||||
published_at: 2024-02-15
|
||||
release: 16.9
|
||||
|
||||
- name: Request changes on merge requests
|
||||
description: |
|
||||
The last part of reviewing a merge request is communicating the outcome. While approving was unambiguous, leaving comments was not. They required the author to read your comments, then determine if the comments were purely informational, or described needed changes. Now, when you complete your review, you can select from three options:
|
||||
|
||||
- **Comment**: Submit general feedback without explicitly approving.
|
||||
- **Approve**: Submit feedback and approve the changes.
|
||||
- **Request changes**: Submit feedback that should be addressed before merging.
|
||||
|
||||
The sidebar now shows the outcome of your review next to your name. Currently, ending your review with **Request changes** doesn't block the merge request from being merged, but it provides extra context to other participants in the merge request.
|
||||
|
||||
You can leave feedback about the **Request changes** feature in our [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/438573).
|
||||
stage: create
|
||||
self-managed: true
|
||||
gitlab-com: true
|
||||
available_in: [Free, Premium, Ultimate]
|
||||
documentation_link: https://docs.gitlab.com/ee/user/project/merge_requests/reviews/#submit-a-review
|
||||
image_url: https://about.gitlab.com/images/16_9/create-request-changes-merge-requests.png
|
||||
published_at: 2024-02-15
|
||||
release: 16.9
|
||||
|
||||
- name: Expanded options for auto-canceling pipelines
|
||||
description: |
|
||||
Currently, to use the [auto-cancel redundant pipeline feature](https://docs.gitlab.com/ee/ci/pipelines/settings.html#auto-cancel-redundant-pipelines), you must set jobs that can be cancelled as [`interruptible: true`](https://docs.gitlab.com/ee/ci/yaml/index.html#interruptible) to determine whether or not a pipeline can be cancelled. But this only applies to jobs that are actively running when GitLab tries to cancel the pipeline. Any jobs that have not yet started (are in "pending" status) are also considered safe to cancel, regardless of their `interruptible` configuration.
|
||||
|
||||
This lack of flexibility hinders users who want more control over which exact jobs can be cancelled by the auto-cancel pipeline feature. To address this limitation, we are pleased to announce the introduction of the `auto_cancel:on_new_commit` keywords with more granular control over job cancellation. If the legacy behavior did not work for you, you now have the option to configure the pipeline to only cancel jobs that are explicitly set with `interruptible: true`, even if they haven't started yet. You can also set jobs to never be automatically cancelled.
|
||||
stage: verify
|
||||
self-managed: true
|
||||
gitlab-com: true
|
||||
available_in: [Free, Premium, Ultimate]
|
||||
documentation_link: https://docs.gitlab.com/ee/ci/yaml/index.html#workflowauto_cancelon_new_commit
|
||||
image_url: https://about.gitlab.com/images/16_9/interruptible.png
|
||||
published_at: 2024-02-15
|
||||
release: 16.9
|
||||
|
||||
- name: Validate Terraform modules from your group or subgroup
|
||||
description: |
|
||||
When using the GitLab Terraform registry, it is important to have a cross-project view of all your modules. Until recently, the user interface has been available only at the project level. If your group had a complex structure, you might have had difficulty finding and validating your modules.
|
||||
|
||||
From GitLab 16.9, you can view all of your group and subgroup modules in GitLab. The increased visibility provides a better understanding of your registry, and decreases the likelihood of name collisions.
|
||||
stage: package
|
||||
self-managed: true
|
||||
gitlab-com: true
|
||||
available_in: [Free, Premium, Ultimate]
|
||||
documentation_link: https://docs.gitlab.com/ee/user/packages/package_registry/#view-packages
|
||||
image_url: https://img.youtube.com/vi/1Ocypvrrdiw/hqdefault.jpg
|
||||
published_at: 2024-02-15
|
||||
release: 16.9
|
||||
|
||||
- name: More detailed security findings in VS Code
|
||||
description: |
|
||||
We've improved how security findings are shown in the [GitLab Workflow extension](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#security-findings) for Visual Studio Code (VS Code).
|
||||
You can now see more details of your security findings that weren't previously shown, including:
|
||||
|
||||
- Full descriptions, with rich-text formatting.
|
||||
- The solution to the vulnerability, if one is available.
|
||||
- A link to the location where the problem occurs in your codebase.
|
||||
- Links to more information about the type of vulnerability discovered.
|
||||
|
||||
We've also:
|
||||
|
||||
- Improved how the extension shows the status of security scans before results are ready.
|
||||
- Made other usability improvements.
|
||||
stage: secure
|
||||
self-managed: true
|
||||
gitlab-com: true
|
||||
available_in: [Ultimate]
|
||||
documentation_link: https://docs.gitlab.com/ee/editor_extensions/visual_studio_code/
|
||||
image_url: https://about.gitlab.com/images/16_9/vs-code-security-finding-details.png
|
||||
published_at: 2024-02-15
|
||||
release: 16.9
|
||||
|
||||
- name: Standards Adherence Report Improvements
|
||||
description: |
|
||||
The [standards adherence report](https://docs.gitlab.com/ee/user/compliance/compliance_center/#view-the-standards-adherence-dashboard), within the
|
||||
[compliance center](https://docs.gitlab.com/ee/user/compliance/compliance_center/), is the destination for compliance teams to monitor their compliance posture.
|
||||
|
||||
In GitLab 16.5, we introduced the report with the GitLab Standard - a set of common compliance requirements all compliance teams should monitor. The standard helps
|
||||
you understand which projects meet these requirements, which ones fall short, and how to bring them into compliance. Over time, we'll be introducing more standards
|
||||
into the reporting.
|
||||
|
||||
In this milestone, we've made some improvements which will make reporting more robust and actionable. These include:
|
||||
|
||||
- Grouping results by project
|
||||
- Grouping results by standard (starting with the GitLab standard)
|
||||
- Filtering by project, compliance framework, name, and standard
|
||||
- Export to CSV (delivered via email)
|
||||
- Improved pagination
|
||||
stage: govern
|
||||
self-managed: true
|
||||
gitlab-com: true
|
||||
available_in: [Ultimate]
|
||||
documentation_link: https://docs.gitlab.com/ee/user/compliance/compliance_center/#standards-adherence-dashboard
|
||||
image_url: https://about.gitlab.com/images/16_9/standards-adherence-grouping.png
|
||||
published_at: 2024-02-15
|
||||
release: 16.9
|
||||
|
|
@ -7,4 +7,19 @@ feature_categories:
|
|||
description: Verification status for DAST Profiles
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/103063
|
||||
milestone: '15.6'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
allow_cross_joins:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_transactions:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_foreign_keys:
|
||||
- gitlab_main_clusterwide
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
references: projects
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: dast_profile_id
|
||||
table: dast_profiles
|
||||
sharding_key: project_id
|
||||
belongs_to: dast_profile
|
||||
|
|
|
|||
|
|
@ -7,4 +7,19 @@ feature_categories:
|
|||
description: Join table between DAST Profiles and CI Pipelines
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56821
|
||||
milestone: '13.11'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
allow_cross_joins:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_transactions:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_foreign_keys:
|
||||
- gitlab_main_clusterwide
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
references: projects
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: dast_profile_id
|
||||
table: dast_profiles
|
||||
sharding_key: project_id
|
||||
belongs_to: dast_profile
|
||||
|
|
|
|||
|
|
@ -7,4 +7,19 @@ feature_categories:
|
|||
description: Join Table for Runner tags and DAST Profiles
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108371
|
||||
milestone: '15.8'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
allow_cross_joins:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_transactions:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_foreign_keys:
|
||||
- gitlab_main_clusterwide
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
references: projects
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: dast_profile_id
|
||||
table: dast_profiles
|
||||
sharding_key: project_id
|
||||
belongs_to: dast_profile
|
||||
|
|
|
|||
|
|
@ -7,4 +7,19 @@ feature_categories:
|
|||
description: Join table between DAST Scanner Profiles and CI Builds
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63362
|
||||
milestone: '14.1'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
allow_cross_joins:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_transactions:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_foreign_keys:
|
||||
- gitlab_main_clusterwide
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
references: projects
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: dast_scanner_profile_id
|
||||
table: dast_scanner_profiles
|
||||
sharding_key: project_id
|
||||
belongs_to: dast_scanner_profile
|
||||
|
|
|
|||
|
|
@ -7,4 +7,19 @@ feature_categories:
|
|||
description: Secret variables used in DAST on-demand scans
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56067
|
||||
milestone: '13.11'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
allow_cross_joins:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_transactions:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_foreign_keys:
|
||||
- gitlab_main_clusterwide
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
references: projects
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: dast_site_profile_id
|
||||
table: dast_site_profiles
|
||||
sharding_key: project_id
|
||||
belongs_to: dast_site_profile
|
||||
|
|
|
|||
|
|
@ -7,4 +7,19 @@ feature_categories:
|
|||
description: Join table between DAST Site Profiles and CI Builds
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63362
|
||||
milestone: '14.1'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
allow_cross_joins:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_transactions:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_foreign_keys:
|
||||
- gitlab_main_clusterwide
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
references: projects
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: dast_site_profile_id
|
||||
table: dast_site_profiles
|
||||
sharding_key: project_id
|
||||
belongs_to: dast_site_profile
|
||||
|
|
|
|||
|
|
@ -7,4 +7,19 @@ feature_categories:
|
|||
description: The site to be validated with a dast_site_token
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41639
|
||||
milestone: '13.4'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
allow_cross_joins:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_transactions:
|
||||
- gitlab_main_clusterwide
|
||||
allow_cross_foreign_keys:
|
||||
- gitlab_main_clusterwide
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
references: projects
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: dast_site_token_id
|
||||
table: dast_site_tokens
|
||||
sharding_key: project_id
|
||||
belongs_to: dast_site_token
|
||||
|
|
|
|||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddSemVerToCatalogResourcesVersion < Gitlab::Database::Migration[2.2]
|
||||
enable_lock_retries!
|
||||
|
||||
milestone '16.10'
|
||||
# rubocop:disable Migration/AddLimitToTextColumns -- limit is added in 20240213113719_add_text_limit_to_catalog_resource_versions_semver_prerelease
|
||||
|
||||
def change
|
||||
add_column :catalog_resource_versions, :semver_major, :integer
|
||||
add_column :catalog_resource_versions, :semver_minor, :integer
|
||||
add_column :catalog_resource_versions, :semver_patch, :integer
|
||||
add_column :catalog_resource_versions, :semver_prerelease, :text
|
||||
end
|
||||
# rubocop:enable Migration/AddLimitToTextColumns
|
||||
end
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddTextLimitToCatalogResourceVersionsSemverPrerelease < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
|
||||
milestone '16.10'
|
||||
|
||||
def up
|
||||
add_text_limit :catalog_resource_versions, :semver_prerelease, 255
|
||||
end
|
||||
|
||||
def down
|
||||
remove_text_limit :catalog_resource_versions, :semver_prerelease
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# See https://docs.gitlab.com/ee/development/migration_style_guide.html
|
||||
# for more information on how to write migrations for GitLab.
|
||||
|
||||
class SelfHostedSentNotificationsCleanup < Gitlab::Database::Migration[2.2]
|
||||
include Gitlab::Database::MigrationHelpers::ConvertToBigint
|
||||
|
||||
enable_lock_retries!
|
||||
milestone '16.10'
|
||||
|
||||
TABLE = :sent_notifications
|
||||
COLUMNS = [:id]
|
||||
|
||||
def up
|
||||
return if should_skip?
|
||||
return if temp_column_removed?(TABLE, COLUMNS.first)
|
||||
|
||||
cleanup_conversion_of_integer_to_bigint(TABLE, COLUMNS)
|
||||
end
|
||||
|
||||
def down
|
||||
# no-op
|
||||
end
|
||||
|
||||
def should_skip?
|
||||
com_or_dev_or_test_but_not_jh?
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
70164c8c55ac94314a73074b04ec1fc1ad4aaed199347f22904e6691aee870d3
|
||||
|
|
@ -0,0 +1 @@
|
|||
eea390222d35a37a46a1f8997a2b9752f393b836cdb1db9ef45114f5260907b3
|
||||
|
|
@ -0,0 +1 @@
|
|||
07e1a3a02552425f4a5345d9ed3eb7da7f4f09b9f6c9071b1527a1a0e5e3fd10
|
||||
|
|
@ -5516,7 +5516,12 @@ CREATE TABLE catalog_resource_versions (
|
|||
catalog_resource_id bigint NOT NULL,
|
||||
project_id bigint NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
released_at timestamp with time zone DEFAULT '1970-01-01 00:00:00+00'::timestamp with time zone NOT NULL
|
||||
released_at timestamp with time zone DEFAULT '1970-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
|
||||
semver_major integer,
|
||||
semver_minor integer,
|
||||
semver_patch integer,
|
||||
semver_prerelease text,
|
||||
CONSTRAINT check_701bdce47b CHECK ((char_length(semver_prerelease) <= 255))
|
||||
);
|
||||
|
||||
CREATE SEQUENCE catalog_resource_versions_id_seq
|
||||
|
|
|
|||
|
|
@ -576,7 +576,7 @@ To create a rule to allow Gitaly binary execution:
|
|||
```
|
||||
|
||||
1. Restart the service:
|
||||
|
||||
|
||||
```shell
|
||||
systemctl restart fapolicyd
|
||||
```
|
||||
|
|
|
|||
|
|
@ -249,6 +249,7 @@ Example response:
|
|||
"is_shared": false,
|
||||
"runner_type": "project_type",
|
||||
"contacted_at": "2016-01-25T16:39:48.066Z",
|
||||
"maintenance_note": null,
|
||||
"name": null,
|
||||
"online": true,
|
||||
"status": "online",
|
||||
|
|
@ -281,17 +282,18 @@ Update details of a runner.
|
|||
PUT /runners/:id
|
||||
```
|
||||
|
||||
| Attribute | Type | Required | Description |
|
||||
|-------------------|---------|----------|-------------------------------------------------------------------------------------------------|
|
||||
| `id` | integer | yes | The ID of a runner |
|
||||
| `description` | string | no | The description of the runner |
|
||||
| `active` | boolean | no | Deprecated: Use `paused` instead. Flag indicating whether the runner is allowed to receive jobs |
|
||||
| `paused` | boolean | no | Specifies if the runner should ignore new jobs |
|
||||
| `tag_list` | array | no | The list of tags for the runner |
|
||||
| `run_untagged` | boolean | no | Specifies if the runner can execute untagged jobs |
|
||||
| `locked` | boolean | no | Specifies if the runner is locked |
|
||||
| `access_level` | string | no | The access level of the runner; `not_protected` or `ref_protected` |
|
||||
| `maximum_timeout` | integer | no | Maximum timeout that limits the amount of time (in seconds) that runners can run jobs |
|
||||
| Attribute | Type | Required | Description |
|
||||
|--------------------|---------|----------|-------------------------------------------------------------------------------------------------|
|
||||
| `id` | integer | yes | The ID of a runner |
|
||||
| `description` | string | no | The description of the runner |
|
||||
| `active` | boolean | no | Deprecated: Use `paused` instead. Flag indicating whether the runner is allowed to receive jobs |
|
||||
| `paused` | boolean | no | Specifies if the runner should ignore new jobs |
|
||||
| `tag_list` | array | no | The list of tags for the runner |
|
||||
| `run_untagged` | boolean | no | Specifies if the runner can execute untagged jobs |
|
||||
| `locked` | boolean | no | Specifies if the runner is locked |
|
||||
| `access_level` | string | no | The access level of the runner; `not_protected` or `ref_protected` |
|
||||
| `maximum_timeout` | integer | no | Maximum timeout that limits the amount of time (in seconds) that runners can run jobs |
|
||||
| `maintenance_note` | string | no | Free-form maintenance notes for the runner (1024 characters) |
|
||||
|
||||
```shell
|
||||
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/runners/6" \
|
||||
|
|
@ -318,6 +320,7 @@ Example response:
|
|||
"is_shared": false,
|
||||
"runner_type": "group_type",
|
||||
"contacted_at": "2016-01-25T16:39:48.066Z",
|
||||
"maintenance_note": null,
|
||||
"name": null,
|
||||
"online": true,
|
||||
"status": "online",
|
||||
|
|
|
|||
|
|
@ -78,7 +78,7 @@ could also deploy regional GOB instances, or even customer-specific GOB instance
|
|||
arises, and route requests to the desired GOB instance.
|
||||
|
||||
Blue arrows highlight the request path from both GitLab SaaS and GitLab self-managed instances
|
||||
to GOB. All requests pass through the [Cloud Connector Gateway](../cloud_connector/index.md)
|
||||
to GOB. All requests pass through the [Cloud Connector Gateway](../cloud_connector/index.md)
|
||||
public service and on to the private GOB service. GitLab SaaS cells will all have access to the public Cloud Connector Gateway.
|
||||
|
||||
The [Cloud Connector Gateway will be a single entry point Load balancer](../cloud_connector/decisions/001_lb_entry_point.md).
|
||||
|
|
@ -215,7 +215,7 @@ sequenceDiagram
|
|||
It is critically important to keep the body of any observability requests from burdening Rails/Puma. All preAuthHandlers in Workhorse will ensure the body is not forwarded
|
||||
to Rails and will only be forwarded to GOB when auth is successful.
|
||||
|
||||
If we consider the daily data transmitted and stored concerning our observability of GitLab.com, and we extrapolate that across our self-managed customers who might have equally demanding needs, we can get an idea of how much data will pass through Workhorse and Cloud Connector. GitLab.com produces 150M+ metrics series, sampled every 30-60 seconds, and 18-22 TB of logs per day.
|
||||
If we consider the daily data transmitted and stored concerning our observability of GitLab.com, and we extrapolate that across our self-managed customers who might have equally demanding needs, we can get an idea of how much data will pass through Workhorse and Cloud Connector. GitLab.com produces 150M+ metrics series, sampled every 30-60 seconds, and 18-22 TB of logs per day.
|
||||
|
||||
We could assume that any Ultimate tier root-level namespace on GitLab.com or any Ultimate tier self-managed instance could send a similar magnitude of data through `cloud.GitLab.com`.
|
||||
|
||||
|
|
@ -314,7 +314,7 @@ Eventually, this quota management will be mapped to the GitLab Organization cons
|
|||
|
||||
To ensure rate limiting and quota management in the GOB service is enforced correctly, GOB will use the IJWT token to extract relevant information
|
||||
about the customer license. Metadata headers will also be sent as part of the request leg from Workhorse to GOB that provides any additional
|
||||
information required by GOB to fulfill requests and enforce quotas and limits.
|
||||
information required by GOB to fulfill requests and enforce quotas and limits.
|
||||
|
||||
## APIs
|
||||
|
||||
|
|
|
|||
|
|
@ -131,7 +131,7 @@ Organizations in the context of Cells 2.0 will contain the following functionali
|
|||
|
||||
### Organization Access
|
||||
|
||||
See [Organization Users](organization-users.md).
|
||||
See [Organization Users](organization-users.md).
|
||||
|
||||
### Roles and Permissions
|
||||
|
||||
|
|
|
|||
|
|
@ -260,7 +260,7 @@ To publish a new version of the component to the catalog:
|
|||
running the release job.
|
||||
|
||||
After the release job completes successfully, the release is created and the new version
|
||||
is published to the CI/CD catalog.
|
||||
is published to the CI/CD catalog. Tags must use semantic versioning, for example `1.0.0`.
|
||||
|
||||
### Unpublish a component project
|
||||
|
||||
|
|
|
|||
|
|
@ -85,9 +85,9 @@ gdk start
|
|||
tail -f log/llm.log
|
||||
```
|
||||
|
||||
## Testing GitLab Duo Chat against real LLMs locally
|
||||
## Testing GitLab Duo Chat
|
||||
|
||||
Because success of answers to user questions in GitLab Duo Chat heavily depends
|
||||
Because the success of answers to user questions in GitLab Duo Chat heavily depends
|
||||
on toolchain and prompts of each tool, it's common that even a minor change in a
|
||||
prompt or a tool impacts processing of some questions.
|
||||
|
||||
|
|
@ -95,6 +95,25 @@ To make sure that a change in the toolchain doesn't break existing
|
|||
functionality, you can use the following RSpec tests to validate answers to some
|
||||
predefined questions when using real LLMs:
|
||||
|
||||
1. `ee/spec/lib/gitlab/llm/completions/chat_real_requests_spec.rb`
|
||||
This test validates that the zero-shot agent is selecting the correct tools
|
||||
for a set of Chat questions. It checks on the tool selection but does not
|
||||
evaluate the quality of the Chat response.
|
||||
1. `ee/spec/lib/gitlab/llm/chain/agents/zero_shot/qa_evaluation_spec.rb`
|
||||
This test evaluates the quality of a Chat response by passing the question
|
||||
asked along with the Chat-provided answer and context to at least two other
|
||||
LLMs for evaluation. This evaluation is limited to questions about issues and
|
||||
epics only. Learn more about the [GitLab Duo Chat QA Evaluation Test](#gitlab-duo-chat-qa-evaluation-test).
|
||||
|
||||
If you are working on any changes to the GitLab Duo Chat logic, be sure to run
|
||||
the [GitLab Duo Chat CI jobs](#testing-with-ci) in the merge request that contains
|
||||
your changes. Some of the CI jobs must be [manually triggered](../../ci/jobs/job_control.md#run-a-manual-job).
|
||||
|
||||
## Testing locally
|
||||
|
||||
To run the QA Evaluation test locally, the following environment variables
|
||||
must be exported:
|
||||
|
||||
```ruby
|
||||
export VERTEX_AI_EMBEDDINGS='true' # if using Vertex embeddings
|
||||
export ANTHROPIC_API_KEY='<key>' # can use dev value of Gitlab::CurrentSettings
|
||||
|
|
@ -104,21 +123,22 @@ export VERTEX_AI_PROJECT='<vertex-project-name>' # can use dev value of Gitlab::
|
|||
REAL_AI_REQUEST=1 bundle exec rspec ee/spec/lib/gitlab/llm/completions/chat_real_requests_spec.rb
|
||||
```
|
||||
|
||||
When you need to update the test questions that require documentation embeddings,
make sure a new fixture is generated and committed together with the change.
When you update the test questions that require documentation embeddings,
make sure you [generate a new fixture](index.md#use-embeddings-in-specs) and
commit it together with the change.

## Running the rspecs tagged with `real_ai_request`
## Testing with CI

The following CI jobs for GitLab project run the rspecs tagged with `real_ai_request`:
The following CI jobs for the GitLab project run the tests tagged with `real_ai_request`:
- `rspec-ee unit gitlab-duo-chat-zeroshot`:
  the job runs `ee/spec/lib/gitlab/llm/completions/chat_real_requests_spec.rb`.
  The job is optionally triggered and allowed to fail.
  The job must be manually triggered and is allowed to fail.

- `rspec-ee unit gitlab-duo-chat-qa`:
  The job runs the QA evaluation tests in
  `ee/spec/lib/gitlab/llm/chain/agents/zero_shot/qa_evaluation_spec.rb`.
  The job is optionally triggered and allowed to fail.
  The job must be manually triggered and is allowed to fail.
  Read about [GitLab Duo Chat QA Evaluation Test](#gitlab-duo-chat-qa-evaluation-test).

- `rspec-ee unit gitlab-duo-chat-qa-fast`:
@@ -179,25 +199,30 @@ See [the snippet](https://gitlab.com/gitlab-org/gitlab/-/snippets/3613745) used

1. For each question, RSpec will regex-match for `CORRECT` or `INCORRECT`.

#### Collection and tracking of QA evaluations via CI/CD automation
#### Collection and tracking of QA evaluation with CI/CD automation

The `gitlab` project's CI configurations have been setup to
run the RSpec,
collect the evaluation response as artifacts
and execute [a reporter script](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/duo_chat/reporter.rb)
The `gitlab` project's CI configurations have been set up to run the RSpec tests,
collect the evaluation response as artifacts and execute
[a reporter script](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/duo_chat/reporter.rb)
that automates collection and tracking of evaluations.
When the `rspec-ee unit gitlab-duo-chat-qa` job runs in a pipeline for a merge request,
the reporter script uses the evaluations saved as CI artifacts
to generate a Markdown report and posts it as a note in the merge request.

When `rspec-ee unit gitlab-duo-chat-qa` is run in a pipeline for a commit on `master` branch,
the reporter script instead
posts the generated report as an issue,
saves the evaluations artfacts as a snippet,
and updates the tracking issue in
[`gitlab-org/ai-powered/ai-framework/qa-evaluation#1`](https://gitlab.com/gitlab-org/ai-powered/ai-framework/qa-evaluation/-/issues/1)
in the project [`gitlab-org/ai-powered/ai-framework/qa-evaluation`](https://gitlab.com/gitlab-org/ai-powered/ai-framework/qa-evaluation).
To keep track of and compare QA test results over time, you must manually
run the `rspec-ee unit gitlab-duo-chat-qa` job on the `master` branch:

1. Visit the [new pipeline page](https://gitlab.com/gitlab-org/gitlab/-/pipelines/new).
1. Select "Run pipeline" to run a pipeline against the `master` branch.
1. When the pipeline first starts, the `rspec-ee unit gitlab-duo-chat-qa` job under the
   "Test" stage will not be available. Wait a few minutes for other CI jobs to
   run and then manually kick off this job by selecting the "Play" icon.

When the test runs on `master`, the reporter script posts the generated report as an issue,
saves the evaluation artifacts as a snippet, and updates the tracking issue in
[`gitlab-org/ai-powered/ai-framework/qa-evaluation#1`](https://gitlab.com/gitlab-org/ai-powered/ai-framework/qa-evaluation/-/issues/1)
in the project [`gitlab-org/ai-powered/ai-framework/qa-evaluation`](https://gitlab.com/gitlab-org/ai-powered/ai-framework/qa-evaluation).
## GraphQL Subscription
@@ -112,9 +112,14 @@ Gitlab::CurrentSettings.update!(anthropic_api_key: <insert API key>)

### Embeddings database

Embeddings are generated through the [VertexAI text embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings). The sections
below explain how to populate embeddings in the DB or extract embeddings to be
used in specs.
Embeddings are generated through the [VertexAI text embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings).

Embeddings for GitLab documentation are updated based on the latest changes
Monday through Friday at 05:00 UTC when the
[embeddings cron job](https://gitlab.com/gitlab-org/gitlab/-/blob/6742f6bd3970c56a9d5bcd31e3d3dff180c97088/config/initializers/1_settings.rb#L817) runs.

The sections below explain how to populate embeddings in the DB or extract
embeddings to be used in specs.

#### Set up
@@ -216,7 +221,7 @@ Therefore, a different setup is required from the [SaaS-only AI features](#test-
- To test Self Managed instances, follow [Cloud Activation steps](../../administration/license.md#activate-gitlab-ee) using the cloud activation code you received earlier.
- To test SaaS, follow [Activate GitLab Enterprise license](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/main/doc/index.md#use-gitlab-enterprise-features) with your license file.
1. Export these environment variables in the same terminal session with `gdk start`:
   - Note that you can also configure your terminal to always export the environment variables (e.g. adding the exports to `~/.bash_profile` or `~/.zshrc`).

   ```shell
   export AI_GATEWAY_URL=http://0.0.0.0:5052 # URL to the local AI Gateway instance
@@ -83,7 +83,7 @@ will also be used in the future to provide a uniform way to migrate data
between Cells.

The actual name of the foreign key can be anything but it must reference a row
in `projects` or `groups`. The chosen `sharding_key` column must be non-nullable.

Setting multiple `sharding_key` columns with nullable values is also allowed, provided that
the table has a check constraint that correctly ensures at least one of the keys is non-null for every row in the table.
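As a minimal sketch only, with a hypothetical `widgets` table and constraint name, such a check constraint could be added with the `add_check_constraint` migration helper:

```ruby
# Hypothetical example: `widgets` has nullable `project_id` and `group_id`
# sharding keys, and a check constraint guarantees one of them is always set.
# The table, constraint name, and milestone are illustrative.
class AddShardingKeyConstraintToWidgets < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!

  milestone '16.9'

  CONSTRAINT_NAME = 'check_widgets_sharding_key_not_null'

  def up
    add_check_constraint(
      :widgets,
      'project_id IS NOT NULL OR group_id IS NOT NULL',
      CONSTRAINT_NAME
    )
  end

  def down
    remove_check_constraint(:widgets, CONSTRAINT_NAME)
  end
end
```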
@@ -30,7 +30,7 @@ use, and troubleshooting. The documentation evolves continuously. It is updated
new products and features, and with improvements for clarity, accuracy, and completeness.

This policy prevents information silos, making it easier to find information
about GitLab products. It also informs decisions about the kinds of content
included in the documentation.

## Topic types
@@ -14,7 +14,8 @@ In order to use SemanticVersionable you must first create a database migration t

```ruby
class AddVersionPartsToModelVersions < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!
  enable_lock_retries!

  milestone '16.9'

  def up
@@ -119,9 +119,9 @@ To view your subscription information and a summary of seat counts:
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Settings > Billing**.

- The usage statistics are updated once per day, which may cause a difference between the information
  in the **Usage Quotas** page and the **Billing page**.
- The **Last login** field is updated when a user signs in after they have signed out. If there is an active session
  when a user re-authenticates (for example, after a 24 hour SAML session timeout), this field is not updated.

### Search seat usage
@@ -147,7 +147,7 @@ If you are assigning a custom role to an existing:
   code on the projects in the group or subgroup.
1. Optional. If you do not know the `id` of the Guest user receiving a custom
   role, find that `id` by making an [API request](../api/member_roles.md#list-all-member-roles-of-a-group).
1. Use the [Group and Project Members API endpoint](../api/members.md#edit-a-member-of-a-group-or-project) to
   associate the member with the Guest+1 role:

   ```shell
@ -180,7 +180,9 @@ module API
|
|||
optional :maximum_timeout, type: Integer,
|
||||
desc: 'Maximum timeout that limits the amount of time (in seconds) ' \
|
||||
'that runners can run jobs'
|
||||
at_least_one_of :description, :active, :paused, :tag_list, :run_untagged, :locked, :access_level, :maximum_timeout
|
||||
optional :maintenance_note, type: String,
|
||||
desc: %q(Free-form maintenance notes for the runner (1024 characters))
|
||||
at_least_one_of :description, :active, :paused, :tag_list, :run_untagged, :locked, :access_level, :maximum_timeout, :maintenance_note
|
||||
mutually_exclusive :active, :paused
|
||||
end
|
||||
put ':id' do
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ module API
|
|||
expose :access_level
|
||||
expose :version, :revision, :platform, :architecture
|
||||
expose :contacted_at
|
||||
expose :maintenance_note
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
expose :projects, with: Entities::BasicProjectDetails do |runner, options|
|
||||
|
|
|
|||
|
|
@ -106,9 +106,7 @@ module Search
|
|||
end
|
||||
|
||||
def show_code_search_tab?
|
||||
return true if tab_enabled_for_project?(:blobs)
|
||||
|
||||
project.nil? && show_elasticsearch_tabs? && feature_flag_tab_enabled?(:global_search_code_tab)
|
||||
tab_enabled_for_project?(:blobs)
|
||||
end
|
||||
|
||||
def show_wiki_search_tab?
|
||||
|
|
|
|||
|
|
@ -28566,6 +28566,9 @@ msgstr ""
|
|||
msgid "KubernetesDashboard|Services"
|
||||
msgstr ""
|
||||
|
||||
msgid "KubernetesDashboard|Spec"
|
||||
msgstr ""
|
||||
|
||||
msgid "KubernetesDashboard|StatefulSet"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -33798,12 +33801,18 @@ msgstr ""
|
|||
msgid "ObservabilityMetrics|Search metrics starting with..."
|
||||
msgstr ""
|
||||
|
||||
msgid "ObservabilityMetrics|Select attributes"
|
||||
msgstr ""
|
||||
|
||||
msgid "ObservabilityMetrics|Type"
|
||||
msgstr ""
|
||||
|
||||
msgid "ObservabilityMetrics|Value"
|
||||
msgstr ""
|
||||
|
||||
msgid "ObservabilityMetrics|all"
|
||||
msgstr ""
|
||||
|
||||
msgid "ObservabilityMetrics|is like"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -34519,6 +34528,9 @@ msgstr ""
|
|||
msgid "Organization|An error occurred creating an organization. Please try again."
|
||||
msgstr ""
|
||||
|
||||
msgid "Organization|An error occurred deleting the project. Please refresh the page to try again."
|
||||
msgstr ""
|
||||
|
||||
msgid "Organization|An error occurred loading the groups. Please refresh the page to try again."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -49632,6 +49644,9 @@ msgstr[1] ""
|
|||
msgid "The API key used by GitLab for accessing the Spam Check service endpoint."
|
||||
msgstr ""
|
||||
|
||||
msgid "The Bugzilla logo is a trademark of the Mozilla Foundation in the U.S. and other countries."
|
||||
msgstr ""
|
||||
|
||||
msgid "The CSV export will be created in the background. Once finished, it will be sent to %{email} in an attachment."
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@
|
|||
"@gitlab/favicon-overlay": "2.0.0",
|
||||
"@gitlab/fonts": "^1.3.0",
|
||||
"@gitlab/svgs": "3.83.0",
|
||||
"@gitlab/ui": "^74.6.0",
|
||||
"@gitlab/ui": "^74.7.0",
|
||||
"@gitlab/visual-review-tools": "1.7.3",
|
||||
"@gitlab/web-ide": "^0.0.1-dev-20240214084918",
|
||||
"@mattiasbuelens/web-streams-adapter": "^0.1.0",
|
||||
|
|
|
|||
|
|
@ -255,7 +255,7 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
|
|||
|
||||
context 'when a pipeline schedule has no variables' do
|
||||
let(:basic_param) do
|
||||
{ description: 'updated_desc', cron: '0 1 * * *', cron_timezone: 'UTC', ref: 'patch-x', active: true }
|
||||
{ description: 'updated_desc', cron: '0 1 * * *', cron_timezone: 'UTC', ref: 'master', active: true }
|
||||
end
|
||||
|
||||
context 'when params include one variable' do
|
||||
|
|
@ -309,7 +309,7 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
|
|||
|
||||
context 'when a pipeline schedule has one variable' do
|
||||
let(:basic_param) do
|
||||
{ description: 'updated_desc', cron: '0 1 * * *', cron_timezone: 'UTC', ref: 'patch-x', active: true }
|
||||
{ description: 'updated_desc', cron: '0 1 * * *', cron_timezone: 'UTC', ref: 'master', active: true }
|
||||
end
|
||||
|
||||
let!(:pipeline_schedule_variable) do
|
||||
|
|
|
|||
|
|
@ -2,6 +2,8 @@
|
|||
|
||||
FactoryBot.define do
|
||||
factory :ci_catalog_resource_version, class: 'Ci::Catalog::Resources::Version' do
|
||||
version { '1.0.0' }
|
||||
|
||||
catalog_resource factory: :ci_catalog_resource
|
||||
project { catalog_resource.project }
|
||||
release { association :release, project: project }
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ FactoryBot.define do
|
|||
factory :ci_pipeline_schedule, class: 'Ci::PipelineSchedule' do
|
||||
cron { '0 1 * * *' }
|
||||
cron_timezone { Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE }
|
||||
ref { 'master' }
|
||||
ref { "#{Gitlab::Git::BRANCH_REF_PREFIX}master" }
|
||||
active { true }
|
||||
description { "pipeline schedule" }
|
||||
project
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe 'CI/CD Catalog releases', :js, feature_category: :pipeline_composition do
|
||||
RSpec.describe 'CI/CD Catalog releases', :js, feature_category: :pipeline_composition, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/432824' do
|
||||
let_it_be(:tag_name) { 'catalog_release_tag' }
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be_with_reload(:namespace) { create(:group) }
|
||||
|
|
|
|||
|
|
@ -39,11 +39,11 @@ RSpec.describe Ci::Catalog::Resources::VersionsFinder, feature_category: :pipeli
|
|||
end
|
||||
|
||||
context 'with name parameter' do
|
||||
let(:name) { 'v1.0' }
|
||||
let(:name) { '1.0.0' }
|
||||
|
||||
it 'returns the version that matches the name' do
|
||||
expect(execute.count).to eq(1)
|
||||
expect(execute.first.name).to eq('v1.0')
|
||||
expect(execute.first.name).to eq('1.0.0')
|
||||
end
|
||||
|
||||
context 'when no version matches the name' do
|
||||
|
|
|
|||
|
|
@ -81,6 +81,22 @@ describe('~/api/projects_api.js', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('deleteProject', () => {
|
||||
beforeEach(() => {
|
||||
jest.spyOn(axios, 'delete');
|
||||
});
|
||||
|
||||
it('deletes to the correct URL', () => {
|
||||
const expectedUrl = `/api/v7/projects/${projectId}`;
|
||||
|
||||
mock.onDelete(expectedUrl).replyOnce(HTTP_STATUS_OK);
|
||||
|
||||
return projectsApi.deleteProject(projectId).then(() => {
|
||||
expect(axios.delete).toHaveBeenCalledWith(expectedUrl);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('importProjectMembers', () => {
|
||||
beforeEach(() => {
|
||||
jest.spyOn(axios, 'post');
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { shallowMount } from '@vue/test-utils';
|
||||
import { GlCollapse, GlButton } from '@gitlab/ui';
|
||||
import WorkloadDetailsItem from '~/kubernetes_dashboard/components/workload_details_item.vue';
|
||||
|
||||
let wrapper;
|
||||
|
|
@ -6,29 +7,87 @@ let wrapper;
|
|||
const propsData = {
|
||||
label: 'name',
|
||||
};
|
||||
const slots = {
|
||||
const defaultSlots = {
|
||||
default: '<b>slot value</b>',
|
||||
label: `<label>${propsData.label}</label>`,
|
||||
};
|
||||
|
||||
const createWrapper = () => {
|
||||
const createWrapper = ({ collapsible, slots = defaultSlots } = {}) => {
|
||||
wrapper = shallowMount(WorkloadDetailsItem, {
|
||||
propsData,
|
||||
propsData: {
|
||||
...propsData,
|
||||
collapsible,
|
||||
},
|
||||
slots,
|
||||
});
|
||||
};
|
||||
|
||||
const findLabel = () => wrapper.findComponent('label');
|
||||
const findCollapsible = () => wrapper.findComponent(GlCollapse);
|
||||
const findCollapsibleButton = () => wrapper.findComponent(GlButton);
|
||||
|
||||
describe('Workload details item component', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper();
|
||||
});
|
||||
|
||||
it('renders the correct label', () => {
|
||||
expect(findLabel().text()).toBe(propsData.label);
|
||||
describe('by default', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper();
|
||||
});
|
||||
|
||||
it('renders the correct label', () => {
|
||||
expect(findLabel().text()).toBe(propsData.label);
|
||||
});
|
||||
|
||||
it('renders default slot content', () => {
|
||||
expect(wrapper.html()).toContain(defaultSlots.default);
|
||||
});
|
||||
});
|
||||
|
||||
it('renders slot content', () => {
|
||||
expect(wrapper.html()).toContain(slots.default);
|
||||
describe('when collapsible is true', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper({ collapsible: true });
|
||||
});
|
||||
|
||||
it('renders collapsible component that is not visible', () => {
|
||||
expect(findCollapsible().props('visible')).toBe(false);
|
||||
});
|
||||
|
||||
it('renders the collapsible button component', () => {
|
||||
expect(findCollapsibleButton().props('icon')).toBe('chevron-right');
|
||||
expect(findCollapsibleButton().attributes('aria-label')).toBe('Expand');
|
||||
});
|
||||
|
||||
describe('when expanded', () => {
|
||||
beforeEach(() => {
|
||||
findCollapsibleButton().vm.$emit('click');
|
||||
});
|
||||
|
||||
it('collapsible is visible', () => {
|
||||
expect(findCollapsible().props('visible')).toBe(true);
|
||||
});
|
||||
|
||||
it('updates the collapsible button component', () => {
|
||||
expect(findCollapsibleButton().props('icon')).toBe('chevron-down');
|
||||
expect(findCollapsibleButton().attributes('aria-label')).toBe('Collapse');
|
||||
});
|
||||
|
||||
it('renders default slot content inside the collapsible', () => {
|
||||
expect(findCollapsible().html()).toContain(defaultSlots.default);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when label slot is provided', () => {
|
||||
const labelSlot = '<span>custom value</span>';
|
||||
|
||||
beforeEach(() => {
|
||||
createWrapper({ slots: { label: labelSlot } });
|
||||
});
|
||||
|
||||
it('renders label slot content', () => {
|
||||
expect(wrapper.html()).toContain(labelSlot);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import { mockPodsTableItems } from '../graphql/mock_data';
|
|||
|
||||
let wrapper;
|
||||
|
||||
const defaultItem = mockPodsTableItems[0];
|
||||
const defaultItem = mockPodsTableItems[2];
|
||||
|
||||
const createWrapper = (item = defaultItem) => {
|
||||
wrapper = shallowMount(WorkloadDetails, {
|
||||
|
|
@ -24,30 +24,51 @@ const findAllBadges = () => wrapper.findAllComponents(GlBadge);
|
|||
const findBadge = (at) => findAllBadges().at(at);
|
||||
|
||||
describe('Workload details component', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper();
|
||||
describe('when minimal fields are provided', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper();
|
||||
});
|
||||
|
||||
it.each`
|
||||
label | data | collapsible | index
|
||||
${'Name'} | ${defaultItem.name} | ${false} | ${0}
|
||||
${'Kind'} | ${defaultItem.kind} | ${false} | ${1}
|
||||
${'Labels'} | ${'key=value'} | ${false} | ${2}
|
||||
${'Status'} | ${defaultItem.status} | ${false} | ${3}
|
||||
${'Annotations'} | ${'annotation: text\nanother: text'} | ${true} | ${4}
|
||||
`('renders a list item for $label', ({ label, data, collapsible, index }) => {
|
||||
expect(findWorkloadDetailsItem(index).props('label')).toBe(label);
|
||||
expect(findWorkloadDetailsItem(index).text()).toMatchInterpolatedText(data);
|
||||
expect(findWorkloadDetailsItem(index).props('collapsible')).toBe(collapsible);
|
||||
});
|
||||
|
||||
it('renders a badge for each of the labels', () => {
|
||||
const label = 'key=value';
|
||||
expect(findAllBadges()).toHaveLength(2);
|
||||
expect(findBadge(0).text()).toBe(label);
|
||||
});
|
||||
|
||||
it('renders a badge for the status value', () => {
|
||||
const { status } = defaultItem;
|
||||
expect(findBadge(1).text()).toBe(status);
|
||||
expect(findBadge(1).props('variant')).toBe(WORKLOAD_STATUS_BADGE_VARIANTS[status]);
|
||||
});
|
||||
});
|
||||
|
||||
it.each`
|
||||
label | data | index
|
||||
${'Name'} | ${defaultItem.name} | ${0}
|
||||
${'Kind'} | ${defaultItem.kind} | ${1}
|
||||
${'Labels'} | ${'key=value'} | ${2}
|
||||
${'Status'} | ${defaultItem.status} | ${3}
|
||||
${'Annotations'} | ${'annotation: text another: text'} | ${4}
|
||||
`('renders a list item for each not empty value', ({ label, data, index }) => {
|
||||
expect(findWorkloadDetailsItem(index).props('label')).toBe(label);
|
||||
expect(findWorkloadDetailsItem(index).text()).toMatchInterpolatedText(data);
|
||||
});
|
||||
describe('when additional fields are provided', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper(mockPodsTableItems[0]);
|
||||
});
|
||||
|
||||
it('renders a badge for each of the labels', () => {
|
||||
const label = 'key=value';
|
||||
expect(findBadge(0).text()).toBe(label);
|
||||
});
|
||||
|
||||
it('renders a badge for the status value', () => {
|
||||
const { status } = defaultItem;
|
||||
expect(findBadge(1).text()).toBe(status);
|
||||
expect(findBadge(1).props('variant')).toBe(WORKLOAD_STATUS_BADGE_VARIANTS[status]);
|
||||
it.each`
|
||||
label | yaml | index
|
||||
${'Status'} | ${'phase: Running\nready: true\nrestartCount: 4'} | ${3}
|
||||
${'Annotations'} | ${'annotation: text\nanother: text'} | ${4}
|
||||
${'Spec'} | ${'restartPolicy: Never\nterminationGracePeriodSeconds: 30'} | ${5}
|
||||
`('renders a collapsible list item for $label with the yaml code', ({ label, yaml, index }) => {
|
||||
expect(findWorkloadDetailsItem(index).props('label')).toBe(label);
|
||||
expect(findWorkloadDetailsItem(index).text()).toBe(yaml);
|
||||
expect(findWorkloadDetailsItem(index).props('collapsible')).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const runningPod = {
|
||||
status: { phase: 'Running' },
|
||||
status: { phase: 'Running', ready: true, restartCount: 4 },
|
||||
metadata: {
|
||||
name: 'pod-1',
|
||||
namespace: 'default',
|
||||
|
|
@ -7,6 +7,7 @@ const runningPod = {
|
|||
labels: { key: 'value' },
|
||||
annotations: { annotation: 'text', another: 'text' },
|
||||
},
|
||||
spec: { restartPolicy: 'Never', terminationGracePeriodSeconds: 30 },
|
||||
};
|
||||
const pendingPod = {
|
||||
status: { phase: 'Pending' },
|
||||
|
|
@ -14,9 +15,10 @@ const pendingPod = {
|
|||
name: 'pod-2',
|
||||
namespace: 'new-namespace',
|
||||
creationTimestamp: '2023-11-21T11:50:59Z',
|
||||
labels: {},
|
||||
annotations: {},
|
||||
labels: { key: 'value' },
|
||||
annotations: { annotation: 'text', another: 'text' },
|
||||
},
|
||||
spec: {},
|
||||
};
|
||||
const succeededPod = {
|
||||
status: { phase: 'Succeeded' },
|
||||
|
|
@ -27,6 +29,7 @@ const succeededPod = {
|
|||
labels: {},
|
||||
annotations: {},
|
||||
},
|
||||
spec: {},
|
||||
};
|
||||
const failedPod = {
|
||||
status: { phase: 'Failed' },
|
||||
|
|
@ -37,6 +40,7 @@ const failedPod = {
|
|||
labels: {},
|
||||
annotations: {},
|
||||
},
|
||||
spec: {},
|
||||
};
|
||||
|
||||
export const k8sPodsMock = [runningPod, runningPod, pendingPod, succeededPod, failedPod, failedPod];
|
||||
|
|
@ -69,24 +73,29 @@ export const mockPodsTableItems = [
|
|||
labels: { key: 'value' },
|
||||
annotations: { annotation: 'text', another: 'text' },
|
||||
kind: 'Pod',
|
||||
spec: { restartPolicy: 'Never', terminationGracePeriodSeconds: 30 },
|
||||
fullStatus: { phase: 'Running', ready: true, restartCount: 4 },
|
||||
},
|
||||
{
|
||||
name: 'pod-1',
|
||||
namespace: 'default',
|
||||
status: 'Running',
|
||||
age: '114d',
|
||||
labels: {},
|
||||
annotations: {},
|
||||
labels: { key: 'value' },
|
||||
annotations: { annotation: 'text', another: 'text' },
|
||||
kind: 'Pod',
|
||||
spec: {},
|
||||
fullStatus: { phase: 'Running', ready: true, restartCount: 4 },
|
||||
},
|
||||
{
|
||||
name: 'pod-2',
|
||||
namespace: 'new-namespace',
|
||||
status: 'Pending',
|
||||
age: '1d',
|
||||
labels: {},
|
||||
annotations: {},
|
||||
labels: { key: 'value' },
|
||||
annotations: { annotation: 'text', another: 'text' },
|
||||
kind: 'Pod',
|
||||
spec: {},
|
||||
},
|
||||
{
|
||||
name: 'pod-3',
|
||||
|
|
@ -96,6 +105,7 @@ export const mockPodsTableItems = [
|
|||
labels: {},
|
||||
annotations: {},
|
||||
kind: 'Pod',
|
||||
spec: {},
|
||||
},
|
||||
{
|
||||
name: 'pod-4',
|
||||
|
|
@ -105,6 +115,7 @@ export const mockPodsTableItems = [
|
|||
labels: {},
|
||||
annotations: {},
|
||||
kind: 'Pod',
|
||||
spec: {},
|
||||
},
|
||||
{
|
||||
name: 'pod-4',
|
||||
|
|
@ -114,6 +125,7 @@ export const mockPodsTableItems = [
|
|||
labels: {},
|
||||
annotations: {},
|
||||
kind: 'Pod',
|
||||
spec: {},
|
||||
},
|
||||
];
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,9 @@ import NewProjectButton from '~/organizations/shared/components/new_project_butt
|
|||
import projectsQuery from '~/organizations/shared/graphql/queries/projects.query.graphql';
|
||||
import { formatProjects } from '~/organizations/shared/utils';
|
||||
import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
|
||||
import { ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
|
||||
import { createAlert } from '~/alert';
|
||||
import { deleteProject } from '~/api/projects_api';
|
||||
import { DEFAULT_PER_PAGE } from '~/api';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
|
|
@ -19,6 +21,7 @@ import {
|
|||
} from '~/organizations/mock_data';
|
||||
|
||||
jest.mock('~/alert');
|
||||
jest.mock('~/api/projects_api');
|
||||
|
||||
Vue.use(VueApollo);
|
||||
|
||||
|
|
@ -301,4 +304,80 @@ describe('ProjectsView', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Deleting project', () => {
|
||||
const MOCK_PROJECT = formatProjects(nodes)[0];
|
||||
|
||||
describe('when API call is successful', () => {
|
||||
beforeEach(async () => {
|
||||
deleteProject.mockResolvedValueOnce(Promise.resolve());
|
||||
|
||||
createComponent();
|
||||
jest.spyOn(wrapper.vm.$apollo.queries.projects, 'refetch');
|
||||
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('calls deleteProject and properly sets project.isDeleting to true before the promise resolves', () => {
|
||||
findProjectsList().vm.$emit('delete', MOCK_PROJECT);
|
||||
|
||||
expect(deleteProject).toHaveBeenCalledWith(MOCK_PROJECT.id);
|
||||
expect(MOCK_PROJECT.actionLoadingStates[ACTION_DELETE]).toBe(true);
|
||||
});
|
||||
|
||||
it('does not call createAlert', async () => {
|
||||
findProjectsList().vm.$emit('delete', MOCK_PROJECT);
|
||||
await waitForPromises();
|
||||
|
||||
expect(createAlert).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('calls refetch and properly sets project.isDeleting to false when the promise resolves', async () => {
|
||||
findProjectsList().vm.$emit('delete', MOCK_PROJECT);
|
||||
await waitForPromises();
|
||||
|
||||
expect(MOCK_PROJECT.actionLoadingStates[ACTION_DELETE]).toBe(false);
|
||||
expect(wrapper.vm.$apollo.queries.projects.refetch).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when API call is not successful', () => {
|
||||
const error = new Error();
|
||||
|
||||
beforeEach(async () => {
|
||||
deleteProject.mockRejectedValue(error);
|
||||
|
||||
createComponent();
|
||||
jest.spyOn(wrapper.vm.$apollo.queries.projects, 'refetch');
|
||||
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('calls deleteProject and properly sets project.isDeleting to true before the promise resolves', () => {
|
||||
findProjectsList().vm.$emit('delete', MOCK_PROJECT);
|
||||
|
||||
expect(deleteProject).toHaveBeenCalledWith(MOCK_PROJECT.id);
|
||||
expect(MOCK_PROJECT.actionLoadingStates[ACTION_DELETE]).toBe(true);
|
||||
});
|
||||
|
||||
it('does call createAlert', async () => {
|
||||
findProjectsList().vm.$emit('delete', MOCK_PROJECT);
|
||||
await waitForPromises();
|
||||
|
||||
expect(createAlert).toHaveBeenCalledWith({
|
||||
message: 'An error occurred deleting the project. Please refresh the page to try again.',
|
||||
error,
|
||||
captureError: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('calls refetch and properly sets project.isDeleting to false when the promise resolves', async () => {
|
||||
findProjectsList().vm.$emit('delete', MOCK_PROJECT);
|
||||
await waitForPromises();
|
||||
|
||||
expect(MOCK_PROJECT.actionLoadingStates[ACTION_DELETE]).toBe(false);
|
||||
expect(wrapper.vm.$apollo.queries.projects.refetch).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
|||
import { organizationProjects, organizationGroups } from '~/organizations/mock_data';
|
||||
|
||||
describe('formatProjects', () => {
|
||||
it('correctly formats the projects', () => {
|
||||
it('correctly formats the projects with delete permissions', () => {
|
||||
const [firstMockProject] = organizationProjects;
|
||||
const formattedProjects = formatProjects(organizationProjects);
|
||||
const [firstFormattedProject] = formattedProjects;
|
||||
|
|
@ -16,7 +16,31 @@ describe('formatProjects', () => {
|
|||
issuesAccessLevel: firstMockProject.issuesAccessLevel.stringValue,
|
||||
forkingAccessLevel: firstMockProject.forkingAccessLevel.stringValue,
|
||||
availableActions: [ACTION_EDIT, ACTION_DELETE],
|
||||
actionLoadingStates: {
|
||||
[ACTION_DELETE]: false,
|
||||
},
|
||||
});
|
||||
|
||||
expect(formattedProjects.length).toBe(organizationProjects.length);
|
||||
});
|
||||
|
||||
it('correctly formats the projects without delete permissions', () => {
|
||||
const nonDeletableProject = organizationProjects[organizationProjects.length - 1];
|
||||
const formattedProjects = formatProjects(organizationProjects);
|
||||
const nonDeletableFormattedProject = formattedProjects[formattedProjects.length - 1];
|
||||
|
||||
expect(nonDeletableFormattedProject).toMatchObject({
|
||||
id: getIdFromGraphQLId(nonDeletableProject.id),
|
||||
name: nonDeletableProject.nameWithNamespace,
|
||||
mergeRequestsAccessLevel: nonDeletableProject.mergeRequestsAccessLevel.stringValue,
|
||||
issuesAccessLevel: nonDeletableProject.issuesAccessLevel.stringValue,
|
||||
forkingAccessLevel: nonDeletableProject.forkingAccessLevel.stringValue,
|
||||
availableActions: [ACTION_EDIT],
|
||||
actionLoadingStates: {
|
||||
[ACTION_DELETE]: false,
|
||||
},
|
||||
});
|
||||
|
||||
expect(formattedProjects.length).toBe(organizationProjects.length);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { GlFormInput, GlModal, GlAlert } from '@gitlab/ui';
|
||||
import { nextTick } from 'vue';
|
||||
import { mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import DeleteModal from '~/projects/components/shared/delete_modal.vue';
|
||||
import { __, sprintf } from '~/locale';
|
||||
|
|
@ -17,6 +18,7 @@ describe('DeleteModal', () => {
|
|||
mergeRequestsCount: 2,
|
||||
forksCount: 3,
|
||||
starsCount: 4,
|
||||
confirmLoading: false,
|
||||
};
|
||||
|
||||
const createComponent = (propsData) => {
|
||||
|
|
@ -143,10 +145,12 @@ describe('DeleteModal', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('emits `primary` event', () => {
|
||||
it('emits `primary` with .prevent event', () => {
|
||||
createComponent();
|
||||
|
||||
findGlModal().vm.$emit('primary');
|
||||
findGlModal().vm.$emit('primary', {
|
||||
preventDefault: jest.fn(),
|
||||
});
|
||||
|
||||
expect(wrapper.emitted('primary')).toEqual([[]]);
|
||||
});
|
||||
|
|
@ -164,4 +168,17 @@ describe('DeleteModal', () => {
|
|||
|
||||
expect(wrapper.findByTestId('modal-footer-slot').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('when confirmLoading switches from true to false, emits `change event`', async () => {
|
||||
createComponent({ confirmLoading: true });
|
||||
|
||||
// setProps is justified here because we are testing the component's
|
||||
// reactive behavior which constitutes an exception
|
||||
// See https://docs.gitlab.com/ee/development/fe_guide/style/vue.html#setting-component-state
|
||||
wrapper.setProps({ confirmLoading: false });
|
||||
|
||||
await nextTick();
|
||||
|
||||
expect(wrapper.emitted('change')).toEqual([[false]]);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -57,7 +57,6 @@ describe('Merge Requests Artifacts list app', () => {
|
|||
beforeEach(() => {
|
||||
createComponent();
|
||||
store.dispatch('requestArtifacts');
|
||||
return nextTick();
|
||||
});
|
||||
|
||||
it('renders a loading icon', () => {
|
||||
|
|
@ -84,7 +83,6 @@ describe('Merge Requests Artifacts list app', () => {
|
|||
data: artifacts,
|
||||
status: HTTP_STATUS_OK,
|
||||
});
|
||||
return nextTick();
|
||||
});
|
||||
|
||||
it('renders a title with the number of artifacts', () => {
|
||||
|
|
@ -107,12 +105,27 @@ describe('Merge Requests Artifacts list app', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('with 0 artifacts', () => {
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
mock.onGet(FAKE_ENDPOINT).reply(HTTP_STATUS_OK, [], {});
|
||||
store.dispatch('receiveArtifactsSuccess', {
|
||||
data: [],
|
||||
status: HTTP_STATUS_OK,
|
||||
});
|
||||
});
|
||||
|
||||
it('does not render', () => {
|
||||
expect(findTitle().exists()).toBe(false);
|
||||
expect(findButtons().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('with error', () => {
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
mock.onGet(FAKE_ENDPOINT).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}, {});
|
||||
store.dispatch('receiveArtifactsError');
|
||||
return nextTick();
|
||||
});
|
||||
|
||||
it('renders the error state', () => {
|
||||
|
|
|
|||
|
|
@ -366,6 +366,7 @@ describe('ProjectsListItem', () => {
|
|||
project: {
|
||||
...project,
|
||||
availableActions: [ACTION_EDIT, ACTION_DELETE],
|
||||
actionLoadingStates: { [ACTION_DELETE]: false },
|
||||
isForked: true,
|
||||
editPath,
|
||||
},
|
||||
|
|
@ -400,6 +401,7 @@ describe('ProjectsListItem', () => {
|
|||
issuesCount: '0',
|
||||
forksCount: '0',
|
||||
starsCount: '0',
|
||||
confirmLoading: false,
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -21,11 +21,11 @@ RSpec.describe Resolvers::Ci::Catalog::Resources::VersionsResolver, feature_cate
|
|||
end
|
||||
|
||||
context 'when name argument is provided' do
|
||||
let(:name) { 'v1.0' }
|
||||
let(:name) { '1.0.0' }
|
||||
|
||||
it 'returns the version that matches the name' do
|
||||
expect(result.items.size).to eq(1)
|
||||
expect(result.items.first.name).to eq('v1.0')
|
||||
expect(result.items.first.name).to eq('1.0.0')
|
||||
end
|
||||
|
||||
context 'when no version matches the name' do
|
||||
|
|
|
|||
|
|
@ -447,7 +447,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
|
|||
|
||||
aggregate_failures do
|
||||
expect(pipeline_schedule.description).to eq('Schedule Description')
|
||||
expect(pipeline_schedule.ref).to eq('master')
|
||||
expect(pipeline_schedule.ref).to eq('refs/heads/master')
|
||||
expect(pipeline_schedule.cron).to eq('0 4 * * 0')
|
||||
expect(pipeline_schedule.cron_timezone).to eq('UTC')
|
||||
expect(pipeline_schedule.active).to eq(false)
|
||||
|
|
|
|||
|
|
@ -3,10 +3,13 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Search::Navigation, feature_category: :global_search do
|
||||
let(:user) { instance_double(User) }
|
||||
let_it_be(:user) { create(:user) }
|
||||
|
||||
let(:project_double) { instance_double(Project) }
|
||||
let(:group_double) { instance_double(Group) }
|
||||
let(:group) { nil }
|
||||
let(:options) { {} }
|
||||
let(:search_navigation) { described_class.new(user: user, project: project, options: options) }
|
||||
let(:search_navigation) { described_class.new(user: user, project: project, group: group, options: options) }
|
||||
|
||||
describe '#tab_enabled_for_project?' do
|
||||
let(:project) { project_double }
|
||||
|
|
@ -72,22 +75,19 @@ RSpec.describe Search::Navigation, feature_category: :global_search do
|
|||
end
|
||||
|
||||
context 'for code tab' do
|
||||
where(:feature_flag_enabled, :show_elasticsearch_tabs, :project, :tab_enabled, :condition) do
|
||||
false | false | nil | false | false
|
||||
true | true | nil | true | true
|
||||
true | false | nil | false | false
|
||||
false | true | nil | false | false
|
||||
false | false | ref(:project_double) | true | true
|
||||
true | false | ref(:project_double) | false | false
|
||||
where(:project, :group, :tab_enabled_for_project, :condition) do
|
||||
nil | nil | false | false
|
||||
nil | ref(:group_double) | false | false
|
||||
ref(:project_double) | nil | true | true
|
||||
ref(:project_double) | nil | false | false
|
||||
end
|
||||
|
||||
with_them do
|
||||
let(:options) { { show_elasticsearch_tabs: show_elasticsearch_tabs } }
|
||||
let(:options) { {} }
|
||||
|
||||
it 'data item condition is set correctly' do
|
||||
allow(search_navigation).to receive(:feature_flag_tab_enabled?)
|
||||
.with(:global_search_code_tab).and_return(feature_flag_enabled)
|
||||
allow(search_navigation).to receive(:tab_enabled_for_project?).with(:blobs).and_return(tab_enabled)
|
||||
allow(search_navigation).to receive(:tab_enabled_for_project?)
|
||||
.with(:blobs).and_return(tab_enabled_for_project)
|
||||
|
||||
expect(tabs[:blobs][:condition]).to eq(condition)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,96 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require_migration!
|
||||
|
||||
RSpec.describe SelfHostedSentNotificationsCleanup, feature_category: :database do
|
||||
after do
|
||||
connection = described_class.new.connection
|
||||
connection.execute('ALTER TABLE sent_notifications DROP COLUMN IF EXISTS id_convert_to_bigint')
|
||||
end
|
||||
|
||||
describe '#up' do
|
||||
context 'when is GitLab.com, dev, or test' do
|
||||
before do
|
||||
connection = described_class.new.connection
|
||||
connection.execute('ALTER TABLE sent_notifications DROP COLUMN IF EXISTS id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'does nothing' do
|
||||
# rubocop: disable RSpec/AnyInstanceOf -- This is the easiest way to test this method
|
||||
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
|
||||
# rubocop: enable RSpec/AnyInstanceOf
|
||||
|
||||
sent_notifications = table(:sent_notifications)
|
||||
|
||||
disable_migrations_output do
|
||||
reversible_migration do |migration|
|
||||
migration.before -> {
|
||||
sent_notifications.reset_column_information
|
||||
|
||||
expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be nil
|
||||
}
|
||||
|
||||
migration.after -> {
|
||||
sent_notifications.reset_column_information
|
||||
|
||||
expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be nil
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when is a self-host customer with the temporary column already dropped' do
|
||||
before do
|
||||
connection = described_class.new.connection
|
||||
connection.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE bigint')
|
||||
connection.execute('ALTER TABLE sent_notifications DROP COLUMN IF EXISTS id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'does nothing' do
|
||||
# rubocop: disable RSpec/AnyInstanceOf -- This is the easiest way to test this method
|
||||
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
|
||||
# rubocop: enable RSpec/AnyInstanceOf
|
||||
|
||||
sent_notifications = table(:sent_notifications)
|
||||
disable_migrations_output do
|
||||
migrate!
|
||||
end
|
||||
|
||||
expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
|
||||
expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when is a self-host with the temporary columns' do
|
||||
before do
|
||||
connection = described_class.new.connection
|
||||
connection.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE bigint')
|
||||
connection.execute('ALTER TABLE sent_notifications ADD COLUMN IF NOT EXISTS id_convert_to_bigint integer')
|
||||
end
|
||||
|
||||
it 'drops the temporary columns' do
|
||||
# rubocop: disable RSpec/AnyInstanceOf -- This is the easiest way to test this method
|
||||
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
|
||||
# rubocop: enable RSpec/AnyInstanceOf
|
||||
|
||||
sent_notifications = table(:sent_notifications)
|
||||
|
||||
disable_migrations_output do
|
||||
sent_notifications.reset_column_information
|
||||
|
||||
expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
|
||||
expect(sent_notifications.columns.find do |c|
|
||||
c.name == 'id_convert_to_bigint'
|
||||
end.sql_type).to eq('integer')
|
||||
migrate!
|
||||
sent_notifications.reset_column_information
|
||||
|
||||
expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
|
||||
expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be nil
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -6,12 +6,21 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
|
|||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:namespace) { create(:group) }
|
||||
let_it_be(:public_namespace_project) do
|
||||
create(:project, :public, namespace: namespace, name: 'A public namespace project')
|
||||
create(:project, :public, namespace: namespace, name: 'A public namespace project', star_count: 10)
|
||||
end
|
||||
|
||||
let_it_be(:public_project) do
|
||||
create(:project, :public, name: 'B public test project', star_count: 20)
|
||||
end
|
||||
|
||||
let_it_be(:namespace_project_a) do
|
||||
create(:project, namespace: namespace, name: 'Test namespace project', star_count: 30)
|
||||
end
|
||||
|
||||
let_it_be(:namespace_project_b) do
|
||||
create(:project, namespace: namespace, name: 'X namespace Project', star_count: 40)
|
||||
end
|
||||
|
||||
let_it_be(:public_project) { create(:project, :public, name: 'B public test project') }
|
||||
let_it_be(:namespace_project_a) { create(:project, namespace: namespace, name: 'Test namespace project') }
|
||||
let_it_be(:namespace_project_b) { create(:project, namespace: namespace, name: 'X namespace Project') }
|
||||
let_it_be(:project_noaccess) { create(:project, namespace: namespace, name: 'Project with no access') }
|
||||
let_it_be(:internal_project) { create(:project, :internal, name: 'Internal project') }
|
||||
|
||||
|
|
@ -94,6 +103,14 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
|
|||
internal_resource.update!(created_at: tomorrow + 1)
|
||||
end
|
||||
|
||||
context 'when there is no sort parameter' do
|
||||
let_it_be(:sort) { nil }
|
||||
|
||||
it 'contains catalog resource sorted by star_count descending' do
|
||||
is_expected.to eq([private_namespace_resource, public_resource_b, public_resource_a, internal_resource])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the sort is created_at ascending' do
|
||||
let_it_be(:sort) { :created_at_asc }
|
||||
|
||||
|
|
|
|||
|
|
@ -5,9 +5,9 @@ require 'spec_helper'
|
|||
RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
|
||||
let_it_be(:current_user) { create(:user) }
|
||||
|
||||
let_it_be(:project_a) { create(:project, name: 'A') }
|
||||
let_it_be(:project_b) { create(:project, name: 'B') }
|
||||
let_it_be(:project_c) { create(:project, name: 'C', description: 'B') }
|
||||
let_it_be(:project_a) { create(:project, name: 'A', star_count: 20) }
|
||||
let_it_be(:project_b) { create(:project, name: 'B', star_count: 10) }
|
||||
let_it_be(:project_c) { create(:project, name: 'C', description: 'B', star_count: 30) }
|
||||
|
||||
let_it_be_with_reload(:resource_a) do
|
||||
create(:ci_catalog_resource, project: project_a, latest_released_at: '2023-02-01T00:00:00Z')
|
||||
|
|
@ -122,6 +122,22 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
|
|||
end
|
||||
end
|
||||
|
||||
describe 'order_by_star_count_desc' do
|
||||
it 'returns catalog resources sorted by project star count in descending order' do
|
||||
ordered_resources = described_class.order_by_star_count(:desc)
|
||||
|
||||
expect(ordered_resources).to eq([resource_c, resource_a, resource_b])
|
||||
end
|
||||
end
|
||||
|
||||
describe 'order_by_star_count_asc' do
|
||||
it 'returns catalog resources sorted by project star count in ascending order' do
|
||||
ordered_resources = described_class.order_by_star_count(:asc)
|
||||
|
||||
expect(ordered_resources).to eq([resource_b, resource_a, resource_c])
|
||||
end
|
||||
end
|
||||
|
||||
describe 'authorized catalog resources' do
|
||||
let_it_be(:namespace) { create(:group) }
|
||||
let_it_be(:other_namespace) { create(:group) }
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category: :pipeline_composition do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
include_context 'when there are catalog resources with versions'
|
||||
|
||||
it { is_expected.to belong_to(:release) }
|
||||
|
|
@ -17,6 +19,27 @@ RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category:
|
|||
it { is_expected.to validate_presence_of(:release) }
|
||||
it { is_expected.to validate_presence_of(:catalog_resource) }
|
||||
it { is_expected.to validate_presence_of(:project) }
|
||||
|
||||
describe 'semver validation' do
|
||||
where(:version, :valid, :semver_major, :semver_minor, :semver_patch, :semver_prerelease) do
|
||||
'1' | false | nil | nil | nil | nil
|
||||
'1.2' | false | nil | nil | nil | nil
|
||||
'1.2.3' | true | 1 | 2 | 3 | nil
|
||||
'1.2.3-beta' | true | 1 | 2 | 3 | 'beta'
|
||||
'1.2.3.beta' | false | nil | nil | nil | nil
|
||||
end
|
||||
|
||||
with_them do
|
||||
let(:catalog_version) { build(:ci_catalog_resource_version, version: version) }
|
||||
|
||||
it do
|
||||
expect(catalog_version.semver_major).to be semver_major
|
||||
expect(catalog_version.semver_minor).to be semver_minor
|
||||
expect(catalog_version.semver_patch).to be semver_patch
|
||||
expect(catalog_version.semver_prerelease).to eq semver_prerelease
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '.for_catalog resources' do
|
||||
|
|
@ -29,10 +52,10 @@ RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category:
|
|||
|
||||
describe '.by_name' do
|
||||
it 'returns the version that matches the name' do
|
||||
versions = described_class.by_name('v1.0')
|
||||
versions = described_class.by_name('1.0.0')
|
||||
|
||||
expect(versions.count).to eq(1)
|
||||
expect(versions.first.name).to eq('v1.0')
|
||||
expect(versions.first.name).to eq('1.0.0')
|
||||
end
|
||||
|
||||
context 'when no version matches the name' do
|
||||
|
|
@ -144,8 +167,8 @@ RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category:
|
|||
|
||||
describe '#readme' do
|
||||
it 'returns the correct readme for the version' do
|
||||
expect(v1_0.readme.data).to include('Readme v1.0')
|
||||
expect(v1_1.readme.data).to include('Readme v1.1')
|
||||
expect(v1_0.readme.data).to include('Readme 1.0.0')
|
||||
expect(v1_1.readme.data).to include('Readme 1.1.0')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -3,9 +3,10 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do
|
||||
let_it_be_with_reload(:project) { create_default(:project) }
|
||||
let_it_be_with_reload(:project) { create_default(:project, :repository) }
|
||||
let_it_be(:repository) { project.repository }
|
||||
|
||||
subject { build(:ci_pipeline_schedule) }
|
||||
subject { build(:ci_pipeline_schedule, project: project) }
|
||||
|
||||
it { is_expected.to belong_to(:project) }
|
||||
it { is_expected.to belong_to(:owner) }
|
||||
|
|
@ -25,31 +26,150 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration d
|
|||
|
||||
it_behaves_like 'cleanup by a loose foreign key' do
|
||||
let!(:parent) { create(:user) }
|
||||
let!(:model) { create(:ci_pipeline_schedule, owner: parent) }
|
||||
let!(:model) { create(:ci_pipeline_schedule, owner: parent, project: project) }
|
||||
end
|
||||
|
||||
describe 'validations' do
|
||||
it 'does not allow invalid cron patterns' do
|
||||
pipeline_schedule = build(:ci_pipeline_schedule, cron: '0 0 0 * *')
|
||||
pipeline_schedule = build(:ci_pipeline_schedule, cron: '0 0 0 * *', project: project)
|
||||
|
||||
expect(pipeline_schedule).not_to be_valid
|
||||
end
|
||||
|
||||
it 'does not allow invalid cron patterns' do
|
||||
pipeline_schedule = build(:ci_pipeline_schedule, cron_timezone: 'invalid')
|
||||
pipeline_schedule = build(:ci_pipeline_schedule, cron_timezone: 'invalid', project: project)
|
||||
|
||||
expect(pipeline_schedule).not_to be_valid
|
||||
end
|
||||
|
||||
it 'does not allow empty variable key' do
|
||||
pipeline_schedule = build(:ci_pipeline_schedule, variables_attributes: [{ secret_value: 'test_value' }])
|
||||
pipeline_schedule = build(:ci_pipeline_schedule,
|
||||
variables_attributes: [{ secret_value: 'test_value' }],
|
||||
project: project)
|
||||
|
||||
expect(pipeline_schedule).not_to be_valid
|
||||
end
|
||||
|
||||
context 'ref is invalid' do
|
||||
let_it_be(:ref) { 'ambiguous' }
|
||||
|
||||
before_all do
|
||||
repository.add_tag(project.creator, ref, 'master')
|
||||
repository.add_branch(project.creator, ref, 'master')
|
||||
end
|
||||
|
||||
context 'when an short ref record is being updated' do
|
||||
let(:new_description) { 'some description' }
|
||||
let(:ref) { 'other' }
|
||||
let(:pipeline_schedule) do
|
||||
build(:ci_pipeline_schedule, cron: ' 0 0 * * * ', ref: ref, project: project)
|
||||
end
|
||||
|
||||
before do
|
||||
repository.add_branch(project.creator, ref, 'master')
|
||||
pipeline_schedule.save!(validate: false)
|
||||
end
|
||||
|
||||
it 'updates the ref' do
|
||||
pipeline_schedule.update!(description: new_description)
|
||||
|
||||
expect(pipeline_schedule.reload.ref).to eq("#{Gitlab::Git::BRANCH_REF_PREFIX}#{ref}")
|
||||
expect(pipeline_schedule.description).to eq(new_description)
|
||||
end
|
||||
|
||||
context 'when an existing record has no ref' do
|
||||
let(:pipeline_schedule) do
|
||||
build(:ci_pipeline_schedule,
|
||||
cron: ' 0 0 * * * ',
|
||||
ref: nil,
|
||||
project: project,
|
||||
importing: true)
|
||||
end
|
||||
|
||||
it 'updates the record' do
|
||||
pipeline_schedule.update!(description: new_description)
|
||||
expect(pipeline_schedule.reload.description).to eq(new_description)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'ref is branch and tag' do
|
||||
let(:pipeline_schedule) { build(:ci_pipeline_schedule, ref: ref, project: project) }
|
||||
|
||||
it 'does not allow ambiguous ref' do
|
||||
pipeline_schedule.valid?
|
||||
|
||||
expect(pipeline_schedule.errors.full_messages)
|
||||
.to include("Ref is ambiguous")
|
||||
end
|
||||
|
||||
context 'importing is enabled' do
|
||||
let(:pipeline_schedule) do
|
||||
build(:ci_pipeline_schedule, ref: ref, project: project, importing: true)
|
||||
end
|
||||
|
||||
it 'does not validate the ref' do
|
||||
expect(pipeline_schedule)
|
||||
.to be_valid
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'ref is not a branch or tag' do
|
||||
let(:ref) { 'unknown' }
|
||||
let(:pipeline_schedule) { build(:ci_pipeline_schedule, ref: ref, project: project) }
|
||||
|
||||
it 'does not allow wrong ref' do
|
||||
pipeline_schedule.valid?
|
||||
|
||||
expect(pipeline_schedule.errors.full_messages)
|
||||
.to include("Ref is ambiguous")
|
||||
end
|
||||
|
||||
context 'importing is enabled' do
|
||||
let(:pipeline_schedule) do
|
||||
build(:ci_pipeline_schedule, ref: ref, project: project, importing: true)
end

it 'does not validate the ref' do
expect(pipeline_schedule)
.to be_valid
end
end
end
end

context 'when an existing record has a valid ref' do
let(:new_description) { 'some description' }
let(:pipeline_schedule) do
build(:ci_pipeline_schedule, cron: ' 0 0 * * * ', project: project)
end

it 'updates the record' do
pipeline_schedule.update!(description: new_description)
expect(pipeline_schedule.reload.description).to eq(new_description)
end
end

context 'when a record is being created' do
let(:ref) { 'master' }
let(:pipeline_schedule) do
build(:ci_pipeline_schedule, cron: ' 0 0 * * * ', project: project, ref: ref)
end

before do
repository.add_branch(project.creator, ref, ref)
end

it 'expands the ref' do
pipeline_schedule.save!
expect(pipeline_schedule.ref).to eq("#{Gitlab::Git::BRANCH_REF_PREFIX}#{ref}")
end
end

context 'when active is false' do
it 'does not allow nullified ref' do
pipeline_schedule = build(:ci_pipeline_schedule, :inactive, ref: nil)
pipeline_schedule = build(:ci_pipeline_schedule, :inactive, ref: nil, project: project)

expect(pipeline_schedule).not_to be_valid
end

@@ -57,7 +177,7 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do

context 'when cron contains trailing whitespaces' do
it 'strips the attribute' do
pipeline_schedule = build(:ci_pipeline_schedule, cron: ' 0 0 * * * ')
pipeline_schedule = build(:ci_pipeline_schedule, cron: ' 0 0 * * * ', project: project)

expect(pipeline_schedule).to be_valid
expect(pipeline_schedule.cron).to eq('0 0 * * *')

@@ -70,7 +190,7 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do

let!(:pipeline_schedule) do
travel_to(1.day.ago) do
create(:ci_pipeline_schedule, :hourly)
create(:ci_pipeline_schedule, :hourly, project: project)
end
end

@@ -91,7 +211,7 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do
subject { described_class.preloaded }

before do
create_list(:ci_pipeline_schedule, 3)
create_list(:ci_pipeline_schedule, 3, project: project)
end

it 'preloads the associations' do

@@ -105,8 +225,8 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do

describe '.owned_by' do
let(:user) { create(:user) }
let!(:owned_pipeline_schedule) { create(:ci_pipeline_schedule, owner: user) }
let!(:other_pipeline_schedule) { create(:ci_pipeline_schedule) }
let!(:owned_pipeline_schedule) { create(:ci_pipeline_schedule, owner: user, project: project) }
let!(:other_pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }

subject { described_class.owned_by(user) }

@@ -116,7 +236,6 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do
end

describe '.for_project' do
let(:project) { create(:project) }
let!(:project_pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
let!(:other_pipeline_schedule) { create(:ci_pipeline_schedule) }

@@ -129,7 +248,7 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do

describe '#set_next_run_at' do
let(:now) { Time.zone.local(2021, 3, 2, 1, 0) }
let(:pipeline_schedule) { create(:ci_pipeline_schedule, cron: "0 1 * * *") }
let(:pipeline_schedule) { create(:ci_pipeline_schedule, cron: "0 1 * * *", project: project) }

it 'calls fallback method next_run_at if there is no plan limit' do
allow(Settings).to receive(:cron_jobs).and_return({ 'pipeline_schedule_worker' => { 'cron' => "0 1 2 3 *" } })

@@ -144,8 +263,13 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do
end

context 'when there are two different pipeline schedules in different time zones' do
let(:pipeline_schedule_1) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'Eastern Time (US & Canada)') }
let(:pipeline_schedule_2) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
let(:pipeline_schedule_1) do
create(:ci_pipeline_schedule, :weekly, cron_timezone: 'Eastern Time (US & Canada)', project: project)
end

let(:pipeline_schedule_2) do
create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC', project: project)
end

it 'sets different next_run_at' do
expect(pipeline_schedule_1.next_run_at).not_to eq(pipeline_schedule_2.next_run_at)

@@ -154,7 +278,7 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do
end

describe '#schedule_next_run!' do
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly) }
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }

before do
pipeline_schedule.update_column(:next_run_at, nil)

@@ -179,7 +303,7 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do
end

describe '#job_variables' do
let!(:pipeline_schedule) { create(:ci_pipeline_schedule) }
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }

let!(:pipeline_schedule_variables) do
create_list(:ci_pipeline_schedule_variable, 2, pipeline_schedule: pipeline_schedule)

@@ -289,13 +413,13 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration do

context 'loose foreign key on ci_pipeline_schedules.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
let_it_be(:parent) { create(:project, :repository) }
let!(:model) { create(:ci_pipeline_schedule, project: parent) }
end
end

describe 'before_destroy' do
let_it_be_with_reload(:pipeline_schedule) { create(:ci_pipeline_schedule, cron: ' 0 0 * * * ') }
let_it_be_with_reload(:pipeline_schedule) { create(:ci_pipeline_schedule, cron: ' 0 0 * * * ', project: project) }
let_it_be_with_reload(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule) }

it 'nullifys associated pipelines' do

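The 'expands the ref' example above implies that a bare branch name is normalized into a fully qualified ref when the schedule is saved. A minimal sketch of that normalization, assuming only that BRANCH_REF_PREFIX is 'refs/heads/' as the expectation suggests (the method below is illustrative, not GitLab's actual callback):

# Illustrative only: expand "master" into "refs/heads/master" unless the ref
# is already fully qualified.
BRANCH_REF_PREFIX = 'refs/heads/'

def expand_ref(ref)
  return ref if ref.nil? || ref.start_with?('refs/')

  "#{BRANCH_REF_PREFIX}#{ref}"
end

expand_ref('master')            # => "refs/heads/master"
expand_ref('refs/heads/master') # => "refs/heads/master"
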
@@ -38,4 +38,26 @@ RSpec.describe Ci::PipelineVariable, feature_category: :continuous_integration do
end
end
end

describe 'routing table switch' do
context 'with ff disabled' do
before do
stub_feature_flags(ci_partitioning_use_ci_pipeline_variables_routing_table: false)
end

it 'uses the legacy table' do
expect(described_class.table_name).to eq('ci_pipeline_variables')
end
end

context 'with ff enabled' do
before do
stub_feature_flags(ci_partitioning_use_ci_pipeline_variables_routing_table: true)
end

it 'uses the routing table' do
expect(described_class.table_name).to eq('p_ci_pipeline_variables')
end
end
end
end

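The routing table examples above flip a feature flag and assert on `described_class.table_name`. A self-contained sketch of that switching pattern (the flag reader below is a stand-in for a real feature-flag check, not GitLab's wiring):

# Sketch: choose the partitioned routing table only when the flag is on.
class PipelineVariableTableSwitch
  def self.routing_table_enabled?
    ENV['CI_PARTITIONING_ROUTING_TABLE'] == '1' # stand-in for a feature flag check
  end

  def self.table_name
    routing_table_enabled? ? 'p_ci_pipeline_variables' : 'ci_pipeline_variables'
  end
end

PipelineVariableTableSwitch.table_name # => "ci_pipeline_variables" while the flag is off
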
@@ -30,4 +30,11 @@ RSpec.describe Integrations::Bugzilla, feature_category: :integrations do
it { is_expected.not_to validate_presence_of(:new_issue_url) }
end
end

describe '.attribution_notice' do
it do
expect(described_class.attribution_notice)
.to eq('The Bugzilla logo is a trademark of the Mozilla Foundation in the U.S. and other countries.')
end
end
end

@@ -47,12 +47,13 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do

describe 'version' do
where(:ctx, :version) do
'version is blank' | ''
'version is not valid package version' | '!!()()'
'version is too large' | ('a' * 256)
'can\'t be blank' | ''
'is invalid' | '!!()()'
'is too long (maximum is 255 characters)' | ('a' * 256)
'must follow semantic version' | '1'
end
with_them do
it { expect(errors).to include(:version) }
it { expect(errors.messages.values.flatten).to include(ctx) }
end

context 'when version is not unique in project+name' do

@@ -272,7 +273,7 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
end

context 'when parsing semver components' do
let(:model_version) { build(:ml_model_versions, model: model1, semver: semver, project: base_project) }
let(:model_version) { build(:ml_model_versions, model: model1, version: semver, project: base_project) }

where(:semver, :valid, :major, :minor, :patch, :prerelease) do
'1' | false | nil | nil | nil | nil

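The version validations above rely on the rspec-parameterized table syntax: each row of `where` becomes one generated example inside `with_them`. A small standalone illustration of that DSL, assuming the rspec-parameterized gem is available (the regex is a simplified stand-in for the real semver check):

require 'rspec-parameterized'

RSpec.describe 'semantic version format (sketch)' do
  where(:version, :valid) do
    '1.2.3'  | true
    '1'      | false
    '!!()()' | false
  end

  with_them do
    it 'matches a minimal semver pattern' do
      expect(version.match?(/\A\d+\.\d+\.\d+\z/)).to eq(valid)
    end
  end
end
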
@@ -327,6 +327,28 @@ RSpec.describe PersonalAccessToken, feature_category: :system_access do
end
end

describe '.expiring_and_not_notified_without_impersonation' do
let_it_be(:expired_token) { create(:personal_access_token, expires_at: 2.days.ago) }
let_it_be(:revoked_token) { create(:personal_access_token, revoked: true) }
let_it_be(:valid_token_and_notified) { create(:personal_access_token, expires_at: 2.days.from_now, expire_notification_delivered: true) }
let_it_be(:valid_token) { create(:personal_access_token, expires_at: 2.days.from_now, impersonation: false) }
let_it_be(:long_expiry_token) { create(:personal_access_token, expires_at: described_class::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now) }

context 'when token is there to be notified' do
it "has only unnotified tokens" do
expect(described_class.expiring_and_not_notified_without_impersonation).to contain_exactly(valid_token)
end
end

context 'when no token is there to be notified' do
it "return empty array" do
valid_token.update!(impersonation: true)

expect(described_class.expiring_and_not_notified_without_impersonation).to be_empty
end
end
end

describe '.expired_today_and_not_notified' do
let_it_be(:active) { create(:personal_access_token) }
let_it_be(:expired_yesterday) { create(:personal_access_token, expires_at: Date.yesterday) }

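The new `.expiring_and_not_notified_without_impersonation` examples exercise a scope that skips revoked, already-expired, already-notified and impersonation tokens. A plain-Ruby sketch of that filtering logic (the 7-day window and attribute names are assumptions drawn from the factories above, not the real scope):

require 'date'

Token = Struct.new(:expires_at, :revoked, :impersonation, :notified, keyword_init: true)

# Keep tokens that are still valid, expire within the window, and have not
# been notified, revoked, or created for impersonation.
def expiring_and_not_notified_without_impersonation(tokens, window_days: 7)
  today = Date.today
  tokens.select do |t|
    !t.revoked && !t.impersonation && !t.notified &&
      t.expires_at >= today && t.expires_at <= today + window_days
  end
end

tokens = [
  Token.new(expires_at: Date.today + 2, revoked: false, impersonation: false, notified: false),
  Token.new(expires_at: Date.today - 2, revoked: false, impersonation: false, notified: false)
]
expiring_and_not_notified_without_impersonation(tokens).length # => 1
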
@@ -162,6 +162,7 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to have_many(:groups) }
it { is_expected.to have_many(:keys).dependent(:destroy) }
it { is_expected.to have_many(:expired_today_and_unnotified_keys) }
it { is_expected.to have_many(:expiring_soon_and_unnotified_personal_access_tokens) }
it { is_expected.to have_many(:deploy_keys).dependent(:nullify) }
it { is_expected.to have_many(:group_deploy_keys) }
it { is_expected.to have_many(:events).dependent(:delete_all) }

@@ -1413,6 +1414,24 @@ RSpec.describe User, feature_category: :user_profile do
end
end

describe '.with_personal_access_tokens_expiring_soon_and_ids' do
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
let_it_be(:pat1) { create(:personal_access_token, user: user1, expires_at: 2.days.from_now) }
let_it_be(:pat2) { create(:personal_access_token, user: user2, expires_at: 7.days.from_now) }
let_it_be(:ids) { [user1.id] }

subject(:users) { described_class.with_personal_access_tokens_expiring_soon_and_ids(ids) }

it 'filters users only by id' do
expect(users).to contain_exactly(user1)
end

it 'includes expiring personal access tokens' do
expect(users.first.expiring_soon_and_unnotified_personal_access_tokens).to be_loaded
end
end

describe '.active_without_ghosts' do
let_it_be(:user1) { create(:user, :external) }
let_it_be(:user2) { create(:user, state: 'blocked') }

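The `.with_personal_access_tokens_expiring_soon_and_ids` examples check two things: the ID filter and that the token association comes back preloaded (`be_loaded`). A minimal ActiveRecord sketch of that filter-plus-preload pattern, assuming the activerecord and sqlite3 gems are available (table, model, and scope names here are simplified placeholders, not GitLab's):

require 'active_record'

ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: ':memory:')

ActiveRecord::Schema.define do
  create_table(:users)
  create_table(:tokens) { |t| t.references :user }
end

class Token < ActiveRecord::Base
  belongs_to :user
end

class User < ActiveRecord::Base
  has_many :tokens

  # Filter by IDs and preload the association so callers can read it without
  # extra queries -- the pattern `be_loaded` asserts on.
  scope :with_tokens_and_ids, ->(ids) { where(id: ids).includes(:tokens) }
end

user = User.create!
Token.create!(user: user)

User.with_tokens_and_ids([user.id]).first.tokens.loaded? # => true
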
@@ -318,6 +318,7 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
expect(json_response['status']).to eq('never_contacted')
expect(json_response['active']).to eq(true)
expect(json_response['paused']).to eq(false)
expect(json_response['maintenance_note']).to be_nil
end
end

@@ -486,6 +487,14 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
expect(shared_runner.reload.maximum_timeout).to eq(1234)
end

it 'maintenance note' do
maintenance_note = shared_runner.maintenance_note
update_runner(shared_runner.id, admin, maintenance_note: "#{maintenance_note}_updated")

expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.maintenance_note).to eq("#{maintenance_note}_updated")
end

it 'fails with no parameters' do
put api("/runners/#{shared_runner.id}", admin)

@@ -45,7 +45,7 @@ RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integration do
description: 'created_desc',
cron: '0 1 * * *',
cronTimezone: 'UTC',
ref: 'patch-x',
ref: 'master',
active: true,
variables: [
{ key: 'AAA', value: "AAA123", variableType: 'ENV_VAR' }

@@ -107,7 +107,11 @@ RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integration do

expect(mutation_response['errors'])
.to match_array(
["Cron is invalid syntax", "Cron timezone is invalid syntax"]
[
"Cron is invalid syntax",
"Cron timezone is invalid syntax",
"Ref is ambiguous"
]
)
end
end

@@ -17,6 +17,7 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integration do
create(:ci_pipeline_schedule_variable, key: 'bar', value: 'barvalue', pipeline_schedule: pipeline_schedule)
end

let(:repository) { project.repository }
let(:mutation) do
variables = {
id: pipeline_schedule.to_global_id.to_s,

@@ -73,6 +74,10 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integration do
}
end

before do
repository.add_branch(project.creator, 'patch-x', 'master')
end

it do
post_graphql_mutation(mutation, current_user: current_user)

@@ -146,7 +151,8 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integration do
"Cron is invalid syntax",
"Cron timezone is invalid syntax",
"Ref can't be blank",
"Description can't be blank"
"Description can't be blank",
"Ref is ambiguous"
]
)
end

@@ -8,7 +8,7 @@ RSpec.describe Ci::Catalog::Resources::ReleaseService, feature_category: :pipeli
it 'validates the catalog resource and creates a version' do
project = create(:project, :catalog_resource_with_components)
catalog_resource = create(:ci_catalog_resource, project: project)
release = create(:release, project: project, sha: project.repository.root_ref_sha)
release = create(:release, project: project, sha: project.repository.root_ref_sha, tag: '1.0.0')

response = described_class.new(release).execute

@@ -24,13 +24,13 @@ RSpec.describe Ci::Catalog::Resources::Versions::CreateService, feature_category
)
end

let(:release) { create(:release, project: project, sha: project.repository.root_ref_sha) }
let(:release) { create(:release, tag: '1.2.0', project: project, sha: project.repository.root_ref_sha) }
let!(:catalog_resource) { create(:ci_catalog_resource, project: project) }

context 'when the project is not a catalog resource' do
it 'does not create a version' do
project = create(:project, :repository)
release = create(:release, project: project, sha: project.repository.root_ref_sha)
release = create(:release, tag: '1.2.1', project: project, sha: project.repository.root_ref_sha)

response = described_class.new(release).execute

@@ -6,6 +6,7 @@ RSpec.describe Ci::PipelineSchedules::CreateService, feature_category: :continuous_integration do
let_it_be(:reporter) { create(:user) }
let_it_be_with_reload(:user) { create(:user) }
let_it_be_with_reload(:project) { create(:project, :public, :repository) }
let_it_be_with_reload(:repository) { project.repository }

subject(:service) { described_class.new(project, user, params) }

@@ -15,6 +16,10 @@ RSpec.describe Ci::PipelineSchedules::CreateService, feature_category: :continuous_integration do
end

describe "execute" do
before_all do
repository.add_branch(project.creator, 'patch-x', 'master')
end

context 'when user does not have permission' do
subject(:service) { described_class.new(project, reporter, {}) }

@@ -48,7 +53,7 @@ RSpec.describe Ci::PipelineSchedules::CreateService, feature_category: :continuous_integration do

expect(result.payload).to have_attributes(
description: 'desc',
ref: 'patch-x',
ref: "#{Gitlab::Git::BRANCH_REF_PREFIX}patch-x",
active: false,
cron: '*/1 * * * *',
cron_timezone: 'UTC'

@@ -14,10 +14,13 @@ RSpec.describe Ci::PipelineSchedules::UpdateService, feature_category: :continuous_integration do
key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule)
end

let_it_be_with_reload(:repository) { project.repository }

before_all do
project.add_maintainer(user)
project.add_owner(project_owner)
project.add_reporter(reporter)
repository.add_branch(project.creator, 'patch-x', 'master')

pipeline_schedule.reload
end

@@ -58,7 +61,8 @@ RSpec.describe Ci::PipelineSchedules::UpdateService, feature_category: :continuous_integration do
service.execute
pipeline_schedule.reload
end.to change { pipeline_schedule.description }.from('pipeline schedule').to('updated_desc')
.and change { pipeline_schedule.ref }.from('master').to('patch-x')
.and change { pipeline_schedule.ref }
.from("#{Gitlab::Git::BRANCH_REF_PREFIX}master").to("#{Gitlab::Git::BRANCH_REF_PREFIX}patch-x")
.and change { pipeline_schedule.active }.from(true).to(false)
.and change { pipeline_schedule.cron }.from('0 1 * * *').to('*/1 * * * *')
.and change { pipeline_schedule.variables.last.key }.from('foo').to('bar')

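The update-service expectation above chains several `change` matchers through `.and`, so a single `execute` call is verified against every attribute transition at once. A tiny standalone example of that composition pattern:

RSpec.describe 'composed change matchers (sketch)' do
  it 'checks several transitions caused by a single action' do
    record = { description: 'old', active: true }

    expect { record.merge!(description: 'new', active: false) }
      .to change { record[:description] }.from('old').to('new')
      .and change { record[:active] }.from(true).to(false)
  end
end
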
@@ -56,7 +56,7 @@ RSpec.describe Releases::CreateService, feature_category: :continuous_integration do
end

context 'when project is a catalog resource' do
let(:project) { create(:project, :catalog_resource_with_components, create_tag: 'final') }
let_it_be(:project) { create(:project, :catalog_resource_with_components, create_tag: '6.0.0') }
let!(:ci_catalog_resource) { create(:ci_catalog_resource, project: project) }
let(:ref) { 'master' }

Some files were not shown because too many files have changed in this diff.