Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-11-22 12:10:30 +00:00
parent 3c9a2dd620
commit 49203bfa3c
106 changed files with 2288 additions and 779 deletions

View File

@ -30,19 +30,6 @@ download-fast-quarantine-report:
- .download-fast-quarantine-report
- .rules:download-fast-quarantine-report
cache-gems:
extends:
- .qa-install
- .ruby-image
- .rules:update-cache
stage: .pre
tags:
- e2e
script:
- echo "Populated qa cache"
cache:
policy: pull-push
# ==========================================
# Test stage
# ==========================================

View File

@ -44,7 +44,9 @@ rails-production-server-boot-puma-cng:
extends:
- .rails-production-server-boot
script:
- curl --silent https://gitlab.com/gitlab-org/build/CNG/-/raw/master/gitlab-webservice/configuration/puma.rb > config/puma.rb
- define_trigger_branch_in_build_env
- echo "TRIGGER_BRANCH is defined as ${TRIGGER_BRANCH}"
- curl --silent "https://gitlab.com/gitlab-org/build/CNG/-/raw/${TRIGGER_BRANCH}/gitlab-webservice/configuration/puma.rb" > config/puma.rb
- sed --in-place "s:/srv/gitlab:${PWD}:" config/puma.rb
- bundle exec puma --environment production --config config/puma.rb &
- sleep 40 # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114124#note_1309506358

View File

@ -14,7 +14,7 @@ include:
gitlab_auth_token_variable_name: "PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE"
allure_job_name: "${QA_RUN_TYPE}"
- project: gitlab-org/quality/pipeline-common
ref: 7.10.2
ref: 7.12.1
file:
- /ci/base.gitlab-ci.yml
- /ci/knapsack-report.yml

View File

@ -64,10 +64,6 @@
rules:
- when: always
.rules:update-cache:
rules:
- if: '$UPDATE_QA_CACHE == "true"'
.rules:download-knapsack:
rules:
- when: always

View File

@ -62,15 +62,21 @@ qa:master-auto-quarantine-dequarantine:
- bundle exec confiner -r .confiner/master.yml
allow_failure: true
qa:update-qa-cache:
cache-qa-gems:
extends:
- .qa-job-base
- .qa-cache-push
- .shared:rules:update-cache
- .qa:rules:update-gem-cache
stage: prepare
script:
- echo "Cache has been updated and ready to be uploaded."
# E2E runners have a separate infra setup and a different cache bucket
cache-qa-gems-e2e-runners:
extends: cache-qa-gems
tags:
- e2e
trigger-omnibus:
stage: qa
extends:

View File

@ -1443,6 +1443,16 @@
############
# QA rules #
############
.qa:rules:update-gem-cache:
rules:
- <<: *if-default-refs
changes:
- qa/Gemfile.lock
- <<: *if-schedule-maintenance
- <<: *if-security-schedule
- <<: *if-foss-schedule
- <<: *if-merge-request-labels-update-caches
.qa:rules:code-merge-request-manual:
rules:
- <<: *if-merge-request
@ -1592,11 +1602,6 @@
- <<: *if-merge-request
changes: *ci-qa-patterns
allow_failure: true
- <<: *if-merge-request
changes:
- qa/Gemfile.lock # qa/Gemfile.lock is a part of *qa-patterns, so this rule must be placed before the one with *qa-patterns changes
variables:
UPDATE_QA_CACHE: "true"
- <<: *if-merge-request
changes: *qa-patterns
allow_failure: true
@ -1636,11 +1641,6 @@
when: manual
- <<: *if-merge-request
changes: *nodejs-patterns
- <<: *if-merge-request
changes:
- qa/Gemfile.lock # qa/Gemfile.lock is a part of *qa-patterns, so this rule must be placed before the one with *qa-patterns changes
variables:
UPDATE_QA_CACHE: "true"
- <<: *if-dot-com-gitlab-org-and-security-merge-request-and-qa-tests-specified
changes: *code-patterns
- <<: *if-merge-request
@ -1657,7 +1657,6 @@
CREATE_TEST_FAILURE_ISSUES: "true"
PROCESS_TEST_RESULTS: "true"
KNAPSACK_GENERATE_REPORT: "true"
UPDATE_QA_CACHE: "true"
QA_SAVE_TEST_METRICS: "true"
QA_EXPORT_TEST_METRICS: "false" # on main runs, metrics are exported to separate bucket via rake task for better consistency

View File

@ -197,8 +197,7 @@ trigger-omnibus-env:
echo "NEXT_RUBY_VERSION=${NEXT_RUBY_VERSION}" >> $BUILD_ENV
echo "GITLAB_ASSETS_TAG=$(assets_image_tag)" >> $BUILD_ENV
echo "EE=$([[ $FOSS_ONLY == '1' ]] && echo 'false' || echo 'true')" >> $BUILD_ENV
target_branch_name="${CI_MERGE_REQUEST_TARGET_BRANCH_NAME:-${CI_COMMIT_REF_NAME}}"
echo "TRIGGER_BRANCH=$([[ "${target_branch_name}" =~ ^[0-9-]+-stable(-ee)?$ ]] && echo ${target_branch_name%-ee} || echo 'master')" >> $BUILD_ENV
define_trigger_branch_in_build_env
- |
echo "Built environment file for omnibus build:"
cat $BUILD_ENV

View File

@ -124,17 +124,6 @@ download-knapsack-report:
- .download-knapsack-report
- .rules:download-knapsack
cache-gems:
extends:
- .ruby-image
- .qa-cache-push
- .rules:update-cache
stage: .pre
tags:
- e2e
script:
- cd qa && bundle install
# Take the existing GDK docker image and reconfigure it with Postgres load
# balancing. Adding 5s lag to 1 of the replicas to validate robustness of
# the load balancer.

View File

@ -1 +1 @@
db9003ccaed4618cb6c9e1d8ce99f7794c868a65
6835085898eb8d3881ee98c476a7bfc2981f0067

View File

@ -6,6 +6,7 @@ import SkeletonLoadingContainer from '~/vue_shared/components/notes/skeleton_not
import { SKELETON_NOTES_COUNT } from '~/admin/abuse_report/constants';
import abuseReportNotesQuery from '../graphql/notes/abuse_report_notes.query.graphql';
import AbuseReportDiscussion from './notes/abuse_report_discussion.vue';
import AbuseReportAddNote from './notes/abuse_report_add_note.vue';
export default {
name: 'AbuseReportNotes',
@ -16,6 +17,7 @@ export default {
components: {
SkeletonLoadingContainer,
AbuseReportDiscussion,
AbuseReportAddNote,
},
props: {
abuseReportId: {
@ -60,6 +62,9 @@ export default {
const discussionId = discussion.notes.nodes[0].id;
return discussionId.split('/')[discussionId.split('/').length - 1];
},
updateKey() {
this.addNoteKey = uniqueId(`abuse-report-add-note-${this.abuseReportId}`);
},
},
};
</script>
@ -86,6 +91,16 @@ export default {
:abuse-report-id="abuseReportId"
/>
</ul>
<div class="js-comment-form">
<ul class="notes notes-form timeline">
<abuse-report-add-note
:key="addNoteKey"
:is-new-discussion="true"
:abuse-report-id="abuseReportId"
@cancelEditing="updateKey"
/>
</ul>
</div>
</template>
</div>
</div>

View File

@ -0,0 +1,138 @@
<script>
import { sprintf, __ } from '~/locale';
import { createAlert } from '~/alert';
import { clearDraft } from '~/lib/utils/autosave';
import createNoteMutation from '../../graphql/notes/create_abuse_report_note.mutation.graphql';
import AbuseReportCommentForm from './abuse_report_comment_form.vue';
export default {
name: 'AbuseReportAddNote',
i18n: {
reply: __('Reply'),
replyToComment: __('Reply to comment'),
commentError: __('Your comment could not be submitted because %{reason}.'),
genericError: __(
'Your comment could not be submitted! Please check your network connection and try again.',
),
},
components: {
AbuseReportCommentForm,
},
props: {
abuseReportId: {
type: String,
required: true,
},
discussionId: {
type: String,
required: false,
default: '',
},
isNewDiscussion: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {
isEditing: this.isNewDiscussion,
isSubmitting: false,
};
},
computed: {
autosaveKey() {
// eslint-disable-next-line @gitlab/require-i18n-strings
return this.discussionId ? `${this.discussionId}-comment` : `${this.abuseReportId}-comment`;
},
timelineEntryClasses() {
return this.isNewDiscussion
? 'timeline-entry note-form'
: // eslint-disable-next-line @gitlab/require-i18n-strings
'note note-wrapper note-comment discussion-reply-holder gl-border-t-0! clearfix';
},
timelineEntryInnerClasses() {
return this.isNewDiscussion ? 'timeline-entry-inner' : '';
},
commentFormWrapperClasses() {
return !this.isEditing
? 'gl-relative gl-display-flex gl-align-items-flex-start gl-flex-nowrap'
: '';
},
},
methods: {
async addNote({ commentText }) {
this.isSubmitting = true;
this.$apollo
.mutate({
mutation: createNoteMutation,
variables: {
input: {
noteableId: this.abuseReportId,
body: commentText,
discussionId: this.discussionId || null,
},
},
})
.then(() => {
clearDraft(this.autosaveKey);
this.cancelEditing();
})
.catch((error) => {
const errorMessage = error?.message
? sprintf(this.$options.i18n.commentError, { reason: error.message.toLowerCase() })
: this.$options.i18n.genericError;
createAlert({
message: errorMessage,
parent: this.$el,
captureError: true,
});
})
.finally(() => {
this.isSubmitting = false;
});
},
cancelEditing() {
this.isEditing = this.isNewDiscussion;
this.$emit('cancelEditing');
},
showReplyForm() {
this.isEditing = true;
},
},
};
</script>
<template>
<li :class="timelineEntryClasses" data-testid="abuse-report-note-timeline-entry">
<div :class="timelineEntryInnerClasses" data-testid="abuse-report-note-timeline-entry-inner">
<div class="timeline-content">
<div class="flash-container"></div>
<div :class="commentFormWrapperClasses" data-testid="abuse-report-comment-form-wrapper">
<abuse-report-comment-form
v-if="isEditing"
:abuse-report-id="abuseReportId"
:is-submitting="isSubmitting"
:autosave-key="autosaveKey"
:is-new-discussion="isNewDiscussion"
@submitForm="addNote"
@cancelEditing="cancelEditing"
/>
<textarea
v-else
ref="textarea"
rows="1"
class="reply-placeholder-text-field gl-font-regular!"
data-testid="abuse-report-note-reply-textarea"
:placeholder="$options.i18n.reply"
:aria-label="$options.i18n.replyToComment"
@focus="showReplyForm"
@click="showReplyForm"
></textarea>
</div>
</div>
</div>
</li>
</template>

View File

@ -0,0 +1,136 @@
<script>
import { GlButton } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { helpPagePath } from '~/helpers/help_page_helper';
import { getDraft, clearDraft, updateDraft } from '~/lib/utils/autosave';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
export default {
name: 'AbuseReportCommentForm',
i18n: {
addReplyText: __('Add a reply'),
placeholderText: __('Write a comment or drag your files here…'),
cancelButtonText: __('Cancel'),
confirmText: s__('Notes|Are you sure you want to cancel creating this comment?'),
discardText: __('Discard changes'),
continueEditingText: __('Continue editing'),
},
components: {
GlButton,
MarkdownEditor,
},
inject: ['uploadNoteAttachmentPath'],
props: {
abuseReportId: {
type: String,
required: true,
},
isSubmitting: {
type: Boolean,
required: false,
default: false,
},
autosaveKey: {
type: String,
required: true,
},
isNewDiscussion: {
type: Boolean,
required: false,
default: false,
},
initialValue: {
type: String,
required: false,
default: '',
},
},
data() {
return {
commentText: getDraft(this.autosaveKey) || this.initialValue || '',
};
},
computed: {
formFieldProps() {
return {
'aria-label': this.$options.i18n.addReplyText,
placeholder: this.$options.i18n.placeholderText,
id: 'abuse-report-add-or-edit-comment',
name: 'abuse-report-add-or-edit-comment',
};
},
markdownDocsPath() {
return helpPagePath('user/markdown');
},
commentButtonText() {
return this.isNewDiscussion ? __('Comment') : __('Reply');
},
},
methods: {
setCommentText(newText) {
if (!this.isSubmitting) {
this.commentText = newText;
updateDraft(this.autosaveKey, this.commentText);
}
},
async cancelEditing() {
if (this.commentText && this.commentText !== this.initialValue) {
const confirmed = await confirmAction(this.$options.i18n.confirmText, {
primaryBtnText: this.$options.i18n.discardText,
cancelBtnText: this.$options.i18n.continueEditingText,
primaryBtnVariant: 'danger',
});
if (!confirmed) {
return;
}
}
this.$emit('cancelEditing');
clearDraft(this.autosaveKey);
},
},
};
</script>
<template>
<div class="timeline-discussion-body gl-overflow-visible!">
<div class="note-body gl-p-0! gl-overflow-visible!">
<form class="common-note-form gfm-form js-main-target-form gl-flex-grow-1 new-note">
<markdown-editor
:value="commentText"
:enable-content-editor="false"
render-markdown-path=""
:uploads-path="uploadNoteAttachmentPath"
:markdown-docs-path="markdownDocsPath"
:form-field-props="formFieldProps"
:autofocus="true"
@input="setCommentText"
@keydown.meta.enter="$emit('submitForm', { commentText })"
@keydown.ctrl.enter="$emit('submitForm', { commentText })"
@keydown.esc.stop="cancelEditing"
/>
<div class="note-form-actions">
<gl-button
category="primary"
variant="confirm"
data-testid="comment-button"
:disabled="!commentText.length"
:loading="isSubmitting"
@click="$emit('submitForm', { commentText })"
>
{{ commentButtonText }}
</gl-button>
<gl-button
data-testid="cancel-button"
category="primary"
class="gl-ml-3"
@click="cancelEditing"
>{{ $options.i18n.cancelButtonText }}
</gl-button>
</div>
</form>
</div>
</div>
</template>

View File

@ -4,6 +4,7 @@ import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item
import DiscussionNotesRepliesWrapper from '~/notes/components/discussion_notes_replies_wrapper.vue';
import ToggleRepliesWidget from '~/notes/components/toggle_replies_widget.vue';
import AbuseReportNote from './abuse_report_note.vue';
import AbuseReportAddNote from './abuse_report_add_note.vue';
export default {
name: 'AbuseReportDiscussion',
@ -12,6 +13,7 @@ export default {
DiscussionNotesRepliesWrapper,
ToggleRepliesWidget,
AbuseReportNote,
AbuseReportAddNote,
},
props: {
abuseReportId: {
@ -92,6 +94,11 @@ export default {
:abuse-report-id="abuseReportId"
/>
</template>
<abuse-report-add-note
:discussion-id="discussionId"
:is-new-discussion="false"
:abuse-report-id="abuseReportId"
/>
</template>
</discussion-notes-replies-wrapper>
</ul>

View File

@ -30,6 +30,7 @@ export const initAbuseReportApp = () => {
allowScopedLabels: false,
updatePath: abuseReport.report.updatePath,
listPath: abuseReportsListPath,
uploadNoteAttachmentPath: abuseReport.uploadNoteAttachmentPath,
labelsManagePath: '',
allowLabelCreate: true,
},

View File

@ -44,6 +44,7 @@ export const initPipelinesIndex = (selector = '#pipelines-list-vue') => {
params,
fullPath,
visibilityPipelineIdType,
showJenkinsCiPrompt,
} = el.dataset;
return new Vue({
@ -57,6 +58,7 @@ export const initPipelinesIndex = (selector = '#pipelines-list-vue') => {
pipelineEditorPath,
pipelineSchedulesPath,
suggestedCiTemplates: JSON.parse(suggestedCiTemplates),
showJenkinsCiPrompt: parseBoolean(showJenkinsCiPrompt),
},
data() {
return {

View File

@ -1,4 +1,5 @@
import { s__ } from '~/locale';
import { helpPagePath } from '~/helpers/help_page_helper';
export const EDITOR_APP_DRAWER_HELP = 'HELP';
export const EDITOR_APP_DRAWER_JOB_ASSISTANT = 'JOB_ASSISTANT';
@ -93,6 +94,9 @@ export const VALIDATE_TAB_FEEDBACK_URL = 'https://gitlab.com/gitlab-org/gitlab/-
export const COMMIT_SHA_POLL_INTERVAL = 1000;
export const MIGRATION_PLAN_HELP_PATH = helpPagePath('ci/migration/plan_a_migration');
export const MIGRATE_FROM_JENKINS_TRACKING_LABEL = 'migrate_from_jenkins_prompt';
export const I18N = {
title: s__('Pipelines|Get started with GitLab CI/CD'),
learnBasics: {
@ -107,6 +111,13 @@ export const I18N = {
),
cta: s__('Pipelines|Try test template'),
},
migrateFromJenkins: {
title: s__('Pipelines|Migrate to GitLab CI/CD from Jenkins'),
description: s__(
'Pipelines|Take advantage of simple, scalable pipelines and CI/CD-enabled features. You can view integration results, security scans, tests, code coverage and more directly in merge requests!',
),
cta: s__('Pipelines|Start with a migration plan'),
},
},
templates: {
title: s__('Pipelines|Ready to set up CI/CD for your project?'),

View File

@ -1,7 +1,12 @@
<script>
import { GlButton, GlCard, GlSprintf } from '@gitlab/ui';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import { STARTER_TEMPLATE_NAME, I18N } from '~/ci/pipeline_editor/constants';
import {
STARTER_TEMPLATE_NAME,
I18N,
MIGRATION_PLAN_HELP_PATH,
MIGRATE_FROM_JENKINS_TRACKING_LABEL,
} from '~/ci/pipeline_editor/constants';
import Tracking from '~/tracking';
import CiTemplates from './ci_templates.vue';
@ -15,7 +20,7 @@ export default {
mixins: [Tracking.mixin()],
STARTER_TEMPLATE_NAME,
I18N,
inject: ['pipelineEditorPath'],
inject: ['pipelineEditorPath', 'showJenkinsCiPrompt'],
data() {
return {
gettingStartedTemplateUrl: mergeUrlParams(
@ -23,17 +28,23 @@ export default {
this.pipelineEditorPath,
),
tracker: null,
migrationPlanUrl: MIGRATION_PLAN_HELP_PATH,
migrationPromptTrackingLabel: MIGRATE_FROM_JENKINS_TRACKING_LABEL,
};
},
mounted() {
if (this.showJenkinsCiPrompt) {
this.trackEvent('render', this.migrationPromptTrackingLabel);
}
},
methods: {
trackEvent(template) {
this.track('template_clicked', {
label: template,
});
trackEvent(action, label) {
this.track(action, { label });
},
},
};
</script>
<template>
<div>
<h2 class="gl-font-size-h2 gl-text-gray-900">{{ $options.I18N.title }}</h2>
@ -47,28 +58,62 @@ export default {
</gl-sprintf>
</p>
<div class="gl-lg-w-25p gl-lg-pr-5 gl-mb-8">
<gl-card>
<div class="gl-flex-direction-row">
<div class="gl-py-5"><gl-emoji class="gl-font-size-h2-xl" data-name="wave" /></div>
<div class="gl-mb-3">
<strong class="gl-text-gray-800 gl-mb-2">
{{ $options.I18N.learnBasics.gettingStarted.title }}
</strong>
<div class="gl-display-flex gl-flex-direction-row gl-flex-wrap">
<div
v-if="showJenkinsCiPrompt"
class="gl-lg-w-25p gl-md-w-half gl-w-full gl-md-pr-5 gl-pb-8"
data-testid="migrate-from-jenkins-prompt"
>
<gl-card class="gl-bg-blue-50">
<div class="gl-flex-direction-row">
<div class="gl-py-5"><gl-emoji class="gl-font-size-h2-xl" data-name="rocket" /></div>
<div class="gl-mb-3">
<strong class="gl-text-gray-800 gl-mb-2">{{
$options.I18N.learnBasics.migrateFromJenkins.title
}}</strong>
</div>
<p class="gl-font-sm gl-h-13">
{{ $options.I18N.learnBasics.migrateFromJenkins.description }}
</p>
</div>
<p class="gl-font-sm">{{ $options.I18N.learnBasics.gettingStarted.description }}</p>
</div>
<gl-button
category="primary"
variant="confirm"
:href="gettingStartedTemplateUrl"
data-testid="test-template-link"
@click="trackEvent($options.STARTER_TEMPLATE_NAME)"
>
{{ $options.I18N.learnBasics.gettingStarted.cta }}
</gl-button>
</gl-card>
<gl-button
category="primary"
variant="confirm"
:href="migrationPlanUrl"
target="_blank"
@click="trackEvent('template_clicked', migrationPromptTrackingLabel)"
>
{{ $options.I18N.learnBasics.migrateFromJenkins.cta }}
</gl-button>
</gl-card>
</div>
<div class="gl-lg-w-25p gl-md-w-half gl-w-full gl-pb-8">
<gl-card>
<div class="gl-flex-direction-row">
<div class="gl-py-5"><gl-emoji class="gl-font-size-h2-xl" data-name="wave" /></div>
<div class="gl-mb-3">
<strong class="gl-text-gray-800 gl-mb-2">
{{ $options.I18N.learnBasics.gettingStarted.title }}
</strong>
</div>
<p class="gl-font-sm gl-h-13">
{{ $options.I18N.learnBasics.gettingStarted.description }}
</p>
</div>
<gl-button
category="primary"
variant="confirm"
:href="gettingStartedTemplateUrl"
data-testid="test-template-link"
@click="trackEvent('template_clicked', $options.STARTER_TEMPLATE_NAME)"
>
{{ $options.I18N.learnBasics.gettingStarted.cta }}
</gl-button>
</gl-card>
</div>
</div>
<h2 class="gl-font-lg gl-text-gray-900">{{ $options.I18N.templates.title }}</h2>

View File

@ -70,3 +70,11 @@ export default {
:items="items"
/>
</template>
<style scoped>
/* TODO: Use max-height prop when gitlab-ui is updated.
See https://gitlab.com/gitlab-org/gitlab-ui/-/issues/2374 */
::v-deep .gl-new-dropdown-inner {
max-height: 310px;
}
</style>

View File

@ -1,50 +0,0 @@
// TODO: Remove this with the removal of the old navigation.
// See https://gitlab.com/groups/gitlab-org/-/epics/11875.
import { highCountTrim } from '~/lib/utils/text_utility';
import Tracking from '~/tracking';
/**
* Updates todo counter when todos are toggled.
* When count is 0, we hide the badge.
*
* @param {jQuery.Event} e
* @param {String} count
*/
export default function initTodoToggle() {
document.addEventListener('todo:toggle', (e) => {
const updatedCount = e.detail.count || 0;
const todoPendingCount = document.querySelector('.js-todos-count');
if (todoPendingCount) {
todoPendingCount.textContent = highCountTrim(updatedCount);
if (updatedCount === 0) {
todoPendingCount.classList.add('hidden');
} else {
todoPendingCount.classList.remove('hidden');
}
}
});
}
function trackShowUserDropdownLink(trackEvent, elToTrack, el) {
const { trackLabel, trackProperty } = elToTrack.dataset;
el.addEventListener('shown.bs.dropdown', () => {
Tracking.event(document.body.dataset.page, trackEvent, {
label: trackLabel,
property: trackProperty,
});
});
}
export function initNavUserDropdownTracking() {
const el = document.querySelector('.js-nav-user-dropdown');
const buyEl = document.querySelector('.js-buy-pipeline-minutes-link');
if (el && buyEl) {
trackShowUserDropdownLink('show_buy_ci_minutes', buyEl, el);
}
}
requestIdleCallback(initNavUserDropdownTracking);

View File

@ -15,7 +15,6 @@ import * as tooltips from '~/tooltips';
import { initPrefetchLinks } from '~/lib/utils/navigation_utility';
import { logHelloDeferred } from 'jh_else_ce/lib/logger/hello_deferred';
import initAlertHandler from './alert_handler';
import initTodoToggle from './header';
import initLayoutNav from './layout_nav';
import { handleLocationHash, addSelectOnFocusBehaviour } from './lib/utils/common_utils';
import { localTimeAgo } from './lib/utils/datetime/timeago_utility';
@ -86,7 +85,6 @@ function deferredInitialisation() {
if (!gon.use_new_navigation) {
initTopNav();
initTodoToggle();
}
initBreadcrumbs();
initPrefetchLinks('.js-prefetch-document');

View File

@ -1,6 +1,26 @@
import { __ } from '~/locale';
import UserItem from './user_item.vue';
import GroupItem from './group_item.vue';
import DeployKeyItem from './deploy_key_item.vue';
export const CONFIG = {
users: { title: __('Users'), icon: 'user', filterKey: 'username', showNamespaceDropdown: true },
groups: { title: __('Groups'), icon: 'group', filterKey: 'name' },
users: {
title: __('Users'),
icon: 'user',
filterKey: 'username',
showNamespaceDropdown: true,
component: UserItem,
},
groups: {
title: __('Groups'),
icon: 'group',
filterKey: 'name',
component: GroupItem,
},
deployKeys: {
title: __('Deploy keys'),
icon: 'key',
filterKey: 'name',
component: DeployKeyItem,
},
};

View File

@ -0,0 +1,51 @@
<script>
import { GlButton, GlIcon } from '@gitlab/ui';
import { sprintf, __ } from '~/locale';
export default {
name: 'DeployKeyItem',
components: { GlButton, GlIcon },
props: {
data: {
type: Object,
required: true,
},
canDelete: {
type: Boolean,
required: false,
default: false,
},
},
data() {
const { title, owner, id } = this.data;
return {
deleteButtonLabel: sprintf(__('Delete %{name}'), { name: title }),
title,
owner,
id,
};
},
};
</script>
<template>
<span
class="gl-display-flex gl-align-items-center gl-gap-3"
data-testid="deploy-key-wrapper"
@click="$emit('select', id)"
>
<gl-icon name="key" />
<span class="gl-display-flex gl-flex-direction-column gl-flex-grow-1">
<span class="gl-font-weight-bold">{{ title }}</span>
<span class="gl-text-gray-600">@{{ owner }}</span>
</span>
<gl-button
v-if="canDelete"
icon="remove"
:aria-label="deleteButtonLabel"
category="tertiary"
@click.stop="$emit('delete', id)"
/>
</span>
</template>

View File

@ -5,8 +5,6 @@ import { createAlert } from '~/alert';
import { __ } from '~/locale';
import groupsAutocompleteQuery from '~/graphql_shared/queries/groups_autocomplete.query.graphql';
import Api from '~/api';
import UserItem from './user_item.vue';
import GroupItem from './group_item.vue';
import { CONFIG } from './constants';
const I18N = {
@ -25,10 +23,6 @@ export default {
GlCollapsibleListbox,
},
props: {
title: {
type: String,
required: true,
},
type: {
type: String,
required: true,
@ -61,12 +55,6 @@ export default {
config() {
return CONFIG[this.type];
},
isUserVariant() {
return this.type === 'users';
},
component() {
return this.isUserVariant ? UserItem : GroupItem;
},
namespaceDropdownText() {
return parseBoolean(this.isProjectNamespace)
? this.$options.i18n.projectGroups
@ -77,12 +65,14 @@ export default {
async handleSearchInput(search) {
this.$refs.results.open();
const searchMethod = {
users: this.fetchUsersBySearchTerm,
groups: this.fetchGroupsBySearchTerm,
deployKeys: this.fetchDeployKeysBySearchTerm,
};
try {
if (this.isUserVariant) {
this.items = await this.fetchUsersBySearchTerm(search);
} else {
this.items = await this.fetchGroupsBySearchTerm(search);
}
this.items = await searchMethod[this.type](search);
} catch (e) {
createAlert({
message: this.$options.i18n.apiErrorMessage,
@ -114,6 +104,10 @@ export default {
})),
);
},
fetchDeployKeysBySearchTerm() {
// TODO - implement API request (follow-up)
// https://gitlab.com/gitlab-org/gitlab/-/issues/432494
},
getItemByKey(key) {
return this.items.find((item) => item[this.config.filterKey] === key);
},
@ -139,7 +133,7 @@ export default {
<gl-card header-class="gl-new-card-header gl-border-none" body-class="gl-card-footer">
<template #header
><strong data-testid="list-selector-title"
>{{ title }}
>{{ config.title }}
<span class="gl-text-gray-700 gl-ml-3"
><gl-icon :name="config.icon" /> {{ selectedItems.length }}</span
></strong
@ -166,7 +160,7 @@ export default {
</template>
<template #list-item="{ item }">
<component :is="component" :data="item" @select="handleSelectItem" />
<component :is="config.component" :data="item" @select="handleSelectItem" />
</template>
</gl-collapsible-listbox>
@ -180,7 +174,7 @@ export default {
</div>
<component
:is="component"
:is="config.component"
v-for="(item, index) of selectedItems"
:key="index"
:class="{ 'gl-border-t': index > 0 }"

View File

@ -87,7 +87,8 @@ module Ci
pipeline_editor_path: can?(current_user, :create_pipeline, project) && project_ci_pipeline_editor_path(project),
suggested_ci_templates: suggested_ci_templates.to_json,
full_path: project.full_path,
visibility_pipeline_id_type: visibility_pipeline_id_type
visibility_pipeline_id_type: visibility_pipeline_id_type,
show_jenkins_ci_prompt: show_jenkins_ci_prompt(project).to_s
}
end
@ -104,5 +105,12 @@ module Ci
yield markdown(warning.content)
end
end
def show_jenkins_ci_prompt(project)
return false unless can?(current_user, :create_pipeline, project)
return false if project.repository.gitlab_ci_yml.present?
project.repository.jenkinsfile?
end
end
end

View File

@ -124,7 +124,7 @@ class AbuseReport < ApplicationRecord
return screenshot.url unless screenshot.upload
asset_host = ActionController::Base.asset_host || Gitlab.config.gitlab.base_url
local_path = Gitlab::Routing.url_helpers.abuse_report_upload_path(
local_path = Gitlab::Routing.url_helpers.abuse_report_screenshot_path(
filename: screenshot.filename,
id: screenshot.upload.model_id,
model: 'abuse_report',

View File

@ -44,7 +44,8 @@ module Ci
attr_reader :current_user
def all_resources
Ci::Catalog::Resource.joins(:project).includes(:project)
Ci::Catalog::Resource.published
.joins(:project).includes(:project)
.merge(Project.public_or_visible_to_user(current_user))
end

View File

@ -679,6 +679,10 @@ class Repository
end
cache_method :gitlab_ci_yml
def jenkinsfile?
file_on_head(:jenkinsfile).present?
end
def xcode_project?
file_on_head(:xcode_config, :tree).present?
end

View File

@ -76,5 +76,9 @@ module Admin
expose :report do |report|
ReportedContentEntity.represent(report)
end
expose :upload_note_attachment_path do |report|
upload_path('abuse_report', id: report.id)
end
end
end

View File

@ -9,7 +9,7 @@ scope path: :uploads do
# show uploads for models, snippets (notes) available for now
get '-/system/:model/:id/:secret/:filename',
to: 'uploads#show',
constraints: { model: /personal_snippet|user/, id: /\d+/, filename: %r{[^/]+} }
constraints: { model: /personal_snippet|user|abuse_report/, id: /\d+/, filename: %r{[^/]+} }
# show temporary uploads
get '-/system/temp/:secret/:filename',
@ -25,12 +25,12 @@ scope path: :uploads do
# create uploads for models, snippets (notes) available for now
post ':model',
to: 'uploads#create',
constraints: { model: /personal_snippet|user/, id: /\d+/ },
constraints: { model: /personal_snippet|user|abuse_report/, id: /\d+/ },
as: 'upload'
post ':model/authorize',
to: 'uploads#authorize',
constraints: { model: /personal_snippet|user/ }
constraints: { model: /personal_snippet|user|abuse_report/ }
# Alert Metric Images
get "-/system/:model/:mounted_as/:id/:filename",
@ -38,11 +38,11 @@ scope path: :uploads do
constraints: { model: /alert_management_metric_image/, mounted_as: /file/, filename: %r{[^/]+} },
as: 'alert_metric_image_upload'
# Abuse Reports Images
# screenshots uploaded by users when reporting abuse
get "-/system/:model/:mounted_as/:id/:filename",
to: "uploads#show",
constraints: { model: /abuse_report/, mounted_as: /screenshot/, filename: %r{[^/]+} },
as: 'abuse_report_upload'
as: 'abuse_report_screenshot'
end
# Redirect old note attachments path to new uploads path.

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
class AddAppliesToAllProtectedBranchesToApprovalGroupRules < Gitlab::Database::Migration[2.2]
milestone '16.7'
def up
add_column :approval_group_rules, :applies_to_all_protected_branches, :boolean, default: false, null: false
end
def down
remove_column :approval_group_rules, :applies_to_all_protected_branches
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class DropIndexUsersOnEmailTrigram < Gitlab::Database::Migration[2.2]
milestone '16.7'
disable_ddl_transaction!
TABLE_NAME = :users
INDEX_NAME = :index_users_on_email_trigram
def up
remove_concurrent_index_by_name TABLE_NAME, INDEX_NAME
end
def down
add_concurrent_index TABLE_NAME, :email, name: INDEX_NAME,
using: :gin, opclass: { email: :gin_trgm_ops }
end
end

View File

@ -0,0 +1 @@
6f18a91bbcd175e0ff4cdf6768fd2381b126ad91efc9137831979fff33188e06

View File

@ -0,0 +1 @@
48f5f5cbb96891364b5bb7b3349f020fb29c8f3610f93e92a8a8d377ebd78ad5

View File

@ -12323,6 +12323,7 @@ CREATE TABLE approval_group_rules (
security_orchestration_policy_configuration_id bigint,
scan_result_policy_id bigint,
name text NOT NULL,
applies_to_all_protected_branches boolean DEFAULT false NOT NULL,
CONSTRAINT check_25d42add43 CHECK ((char_length(name) <= 255))
);
@ -34788,8 +34789,6 @@ CREATE UNIQUE INDEX index_users_on_email ON users USING btree (email);
CREATE INDEX index_users_on_email_domain_and_id ON users USING btree (lower(split_part((email)::text, '@'::text, 2)), id);
CREATE INDEX index_users_on_email_trigram ON users USING gin (email gin_trgm_ops);
CREATE INDEX index_users_on_feed_token ON users USING btree (feed_token);
CREATE INDEX index_users_on_group_view ON users USING btree (group_view);

View File

@ -34,6 +34,8 @@ from the start of the merge request.
- It's not obvious Danger updates the old comment, thus you need to
pay attention to whether it is updated or not.
- When Danger tokens are rotated, it creates confusion/clutter (as old comments
can't be updated).
## Run Danger locally
@ -179,11 +181,15 @@ at GitLab so far:
## Limitations
Danger is run but its output is not added to a merge request comment if working
on a fork. This happens because the secret variable from the canonical project
is not shared to forks.
If working on a personal fork, Danger is run but its output is not added to a
merge request comment and labels are not applied.
This happens because the secret variable from the canonical project is not shared
to forks.
### Configuring Danger for forks
The best and recommended approach is to work from the [community forks](https://gitlab.com/gitlab-community/meta),
where Danger is already configured.
### Configuring Danger for personal forks
Contributors can configure Danger for their forks with the following steps:

View File

@ -182,7 +182,7 @@ Include in the MR description:
- To produce a query plan with enough data, you can use the IDs of:
- The `gitlab-org` namespace (`namespace_id = 9970`), for queries involving a group.
- The `gitlab-org/gitlab-foss` (`project_id = 13083`) or the `gitlab-org/gitlab` (`project_id = 278964`) projects, for queries involving a project.
- For queries involving memebrship of projects, `project_namespace_id` of these projects may be required to create a query plan. These are `15846663` (for `gitlab-org/gitlab`) and `15846626` (for `gitlab-org/gitlab-foss`)
- For queries involving membership of projects, `project_namespace_id` of these projects may be required to create a query plan. These are `15846663` (for `gitlab-org/gitlab`) and `15846626` (for `gitlab-org/gitlab-foss`)
- The `gitlab-qa` user (`user_id = 1614863`), for queries involving a user.
- Optionally, you can also use your own `user_id`, or the `user_id` of a user with a long history within the project or group being used to generate the query plan.
- That means that no query plan should return 0 records or fewer records than the provided limit (if a limit is included). If a query is used in batching, a proper example batch with adequate included results should be identified and provided.

View File

@ -2,6 +2,8 @@
require_relative "../rspec"
require_relative "stub_env"
require_relative "next_instance_of"
require_relative "next_found_instance_of"
require_relative "configurations/time_travel"

View File

@ -5,35 +5,40 @@ module NextFoundInstanceOf
HELPER_METHOD_PATTERN = /(?:allow|expect)_next_found_(?<number>\d+)_instances_of/
def method_missing(method_name, ...)
return super unless match_data = method_name.match(HELPER_METHOD_PATTERN)
match_data = method_name.match(HELPER_METHOD_PATTERN)
return super unless match_data
helper_method = method_name.to_s.sub("_#{match_data[:number]}", '')
public_send(helper_method, *args, match_data[:number].to_i, &block)
public_send(helper_method, *args, match_data[:number].to_i, &block) # rubocop:disable GitlabSecurity/PublicSend -- it is safe
end
def respond_to_missing?(method_name, ...)
match_data = method_name.match(HELPER_METHOD_PATTERN)
return super unless match_data
helper_method = method_name.to_s.sub("_#{match_data[:number]}", '')
helper_method.respond_to_missing?(helper_method, *args, &block)
end
def expect_next_found_instance_of(klass, &block)
expect_next_found_instances_of(klass, nil, &block)
end
def expect_next_found_instances_of(klass, number)
def expect_next_found_instances_of(klass, number, &block)
check_if_active_record!(klass)
stub_allocate(expect(klass), klass, number) do |expectation|
yield(expectation)
end
stub_allocate(expect(klass), klass, number, &block)
end
def allow_next_found_instance_of(klass, &block)
allow_next_found_instances_of(klass, nil, &block)
end
def allow_next_found_instances_of(klass, number)
def allow_next_found_instances_of(klass, number, &block)
check_if_active_record!(klass)
stub_allocate(allow(klass), klass, number) do |allowance|
yield(allowance)
end
stub_allocate(allow(klass), klass, number, &block)
end
private
@ -42,7 +47,7 @@ module NextFoundInstanceOf
raise ArgumentError, ERROR_MESSAGE unless klass < ActiveRecord::Base
end
def stub_allocate(target, klass, number)
def stub_allocate(target, klass, number, &_block)
stub = receive(:allocate)
stub.exactly(number).times if number
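The refactored helper above resolves calls such as expect_next_found_2_instances_of dynamically through method_missing and HELPER_METHOD_PATTERN. A minimal, hypothetical spec sketch of how these helpers are typically invoked (the model and the stubbed methods are illustrative only, not part of the diff):
allow_next_found_instance_of(Project) do |project|
  allow(project).to receive(:lfs_enabled?).and_return(true) # stub the next Project loaded from the database
end
expect_next_found_2_instances_of(Project) do |project| # numbered form, resolved via method_missing above
  expect(project).to receive(:touch)
end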

View File

@ -4,3 +4,4 @@ require_relative "../utils"
require_relative "../version_info"
require_relative "version"
require_relative "strong_memoize"
require_relative "system"

View File

@ -0,0 +1,172 @@
# frozen_string_literal: true
module Gitlab
module Utils
# Module for gathering system/process statistics such as the memory usage.
#
# This module relies on the /proc filesystem being available. If /proc is
# not available the methods of this module will be stubbed.
module System
extend self
PROC_STAT_PATH = '/proc/self/stat'
PROC_STATUS_PATH = '/proc/%s/status'
PROC_SMAPS_ROLLUP_PATH = '/proc/%s/smaps_rollup'
PROC_LIMITS_PATH = '/proc/self/limits'
PROC_FD_GLOB = '/proc/self/fd/*'
PROC_MEM_INFO = '/proc/meminfo'
PRIVATE_PAGES_PATTERN = /^(?<type>Private_Clean|Private_Dirty|Private_Hugetlb):\s+(?<value>\d+)/
PSS_PATTERN = /^Pss:\s+(?<value>\d+)/
RSS_TOTAL_PATTERN = /^VmRSS:\s+(?<value>\d+)/
RSS_ANON_PATTERN = /^RssAnon:\s+(?<value>\d+)/
RSS_FILE_PATTERN = /^RssFile:\s+(?<value>\d+)/
MAX_OPEN_FILES_PATTERN = /Max open files\s*(?<value>\d+)/
MEM_TOTAL_PATTERN = /^MemTotal:\s+(?<value>\d+) (?<unit>.+)/
def summary
proportional_mem = memory_usage_uss_pss
{
version: RUBY_DESCRIPTION,
gc_stat: GC.stat,
memory_rss: memory_usage_rss[:total],
memory_uss: proportional_mem[:uss],
memory_pss: proportional_mem[:pss],
time_cputime: cpu_time,
time_realtime: real_time,
time_monotonic: monotonic_time
}
end
# Returns the given process' RSS (resident set size) in bytes.
def memory_usage_rss(pid: 'self')
results = { total: 0, anon: 0, file: 0 }
safe_yield_procfile(PROC_STATUS_PATH % pid) do |io|
io.each_line do |line|
if (value = parse_metric_value(line, RSS_TOTAL_PATTERN)) > 0
results[:total] = value.kilobytes
elsif (value = parse_metric_value(line, RSS_ANON_PATTERN)) > 0
results[:anon] = value.kilobytes
elsif (value = parse_metric_value(line, RSS_FILE_PATTERN)) > 0
results[:file] = value.kilobytes
end
end
end
results
end
# Returns the given process' USS/PSS (unique/proportional set size) in bytes.
def memory_usage_uss_pss(pid: 'self')
sum_matches(PROC_SMAPS_ROLLUP_PATH % pid, uss: PRIVATE_PAGES_PATTERN, pss: PSS_PATTERN)
.transform_values(&:kilobytes)
end
def memory_total
sum_matches(PROC_MEM_INFO, memory_total: MEM_TOTAL_PATTERN)[:memory_total].kilobytes
end
def file_descriptor_count
Dir.glob(PROC_FD_GLOB).length
end
def max_open_file_descriptors
sum_matches(PROC_LIMITS_PATH, max_fds: MAX_OPEN_FILES_PATTERN)[:max_fds]
end
def cpu_time
Process.clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_second)
end
# Returns the current real time in a given precision.
#
# Returns the time as a Float for precision = :float_second.
def real_time(precision = :float_second)
Process.clock_gettime(Process::CLOCK_REALTIME, precision)
end
# Returns the current monotonic clock time as seconds with microseconds precision.
#
# Returns the time as a Float.
def monotonic_time
Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
end
def thread_cpu_time
# Not all OS kernels are supporting `Process::CLOCK_THREAD_CPUTIME_ID`
# Refer: https://gitlab.com/gitlab-org/gitlab/issues/30567#note_221765627
return unless defined?(Process::CLOCK_THREAD_CPUTIME_ID)
Process.clock_gettime(Process::CLOCK_THREAD_CPUTIME_ID, :float_second)
end
def thread_cpu_duration(start_time)
end_time = thread_cpu_time
return unless start_time && end_time
end_time - start_time
end
# Returns the total time the current process has been running in seconds.
def process_runtime_elapsed_seconds
# Entry 22 (1-indexed) contains the process `starttime`, see:
# https://man7.org/linux/man-pages/man5/proc.5.html
#
# This value is a fixed timestamp in clock ticks.
# To obtain an elapsed time in seconds, we divide by the number
# of ticks per second and subtract from the system uptime.
start_time_ticks = proc_stat_entries[21].to_f
clock_ticks_per_second = Etc.sysconf(Etc::SC_CLK_TCK)
uptime - (start_time_ticks / clock_ticks_per_second)
end
private
# Given a path to a file in /proc and a hash of (metric, pattern) pairs,
# sums up all values found for those patterns under the respective metric.
def sum_matches(proc_file, **patterns)
results = patterns.transform_values { 0 }
safe_yield_procfile(proc_file) do |io|
io.each_line do |line|
patterns.each do |metric, pattern|
results[metric] += parse_metric_value(line, pattern)
end
end
end
results
end
def parse_metric_value(line, pattern)
match = line.match(pattern)
return 0 unless match
match.named_captures.fetch('value', 0).to_i
end
def proc_stat_entries
safe_yield_procfile(PROC_STAT_PATH) do |io|
io.read.split(' ')
end || []
end
def safe_yield_procfile(path, &block)
File.open(path, &block)
rescue Errno::ENOENT
# This means the procfile we're reading from did not exist;
# most likely we're on Darwin.
end
# Equivalent to reading /proc/uptime on Linux 2.6+.
#
# Returns 0 if not supported, e.g. on Darwin.
def uptime
Process.clock_gettime(Process::CLOCK_BOOTTIME)
rescue NameError
0
end
end
end
end
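A brief, illustrative usage sketch for the relocated module (restricted to the public methods defined above; values are examples only):
Gitlab::Utils::System.memory_usage_rss[:total]   # resident set size of the current process, in bytes
Gitlab::Utils::System.memory_usage_uss_pss[:pss] # proportional set size, in bytes
Gitlab::Utils::System.max_open_file_descriptors  # parsed from /proc/self/limits
elapsed = Gitlab::Utils::System.monotonic_time   # Float seconds, suitable for measuring durations
# process_runtime_elapsed_seconds example: with uptime 10_000 s, starttime 250_000 ticks
# and 100 ticks per second, the result is 10_000 - (250_000 / 100.0) = 7_500.0 seconds.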

View File

@ -1,8 +1,8 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'spec_helper'
RSpec.describe Gitlab::Metrics::System do
RSpec.describe Gitlab::Utils::System do
context 'when /proc files exist' do
# Modified column 22 to be 1000 (starttime ticks)
let(:proc_stat) do
@ -12,7 +12,8 @@ RSpec.describe Gitlab::Metrics::System do
end
# Fixtures pulled from:
# Linux carbon 5.3.0-7648-generic #41~1586789791~19.10~9593806-Ubuntu SMP Mon Apr 13 17:50:40 UTC x86_64 x86_64 x86_64 GNU/Linux
# Linux carbon 5.3.0-7648-generic #41~1586789791~19.10~9593806-Ubuntu SMP
# Mon Apr 13 17:50:40 UTC x86_64 x86_64 x86_64 GNU/Linux
let(:proc_status) do
# most rows omitted for brevity
<<~SNIP
@ -318,7 +319,7 @@ RSpec.describe Gitlab::Metrics::System do
stub_const("Process::CLOCK_THREAD_CPUTIME_ID", 16)
expect(Process).to receive(:clock_gettime)
.with(16, kind_of(Symbol)) { 0.111222333 }
.with(16, kind_of(Symbol)).and_return(0.111222333)
expect(described_class.thread_cpu_time).to eq(0.111222333)
end

View File

@ -6,11 +6,21 @@ module API
module Mlflow
class RegisteredModel < Grape::Entity
expose :name
expose :created_at, as: :creation_timestamp
expose :updated_at, as: :last_updated_timestamp
expose :creation_timestamp, documentation: { type: Integer }
expose :last_updated_timestamp, documentation: { type: Integer }
expose :description
expose(:user_id) { |model| model.user_id.to_s }
expose :metadata, as: :tags, using: KeyValue
private
def creation_timestamp
object.created_at.to_i
end
def last_updated_timestamp
object.updated_at.to_i
end
end
end
end

View File

@ -22,6 +22,7 @@ module Gitlab
# Configuration files
gitignore: '.gitignore',
gitlab_ci: ::Ci::Pipeline::DEFAULT_CONFIG_PATH,
jenkinsfile: 'jenkinsfile',
route_map: '.gitlab/route-map.yml',
# Dependency files

View File

@ -1,172 +1,11 @@
# frozen_string_literal: true
require 'gitlab/utils/system'
module Gitlab
module Metrics
# Module for gathering system/process statistics such as the memory usage.
#
# This module relies on the /proc filesystem being available. If /proc is
# not available the methods of this module will be stubbed.
module System
extend self
PROC_STAT_PATH = '/proc/self/stat'
PROC_STATUS_PATH = '/proc/%s/status'
PROC_SMAPS_ROLLUP_PATH = '/proc/%s/smaps_rollup'
PROC_LIMITS_PATH = '/proc/self/limits'
PROC_FD_GLOB = '/proc/self/fd/*'
PROC_MEM_INFO = '/proc/meminfo'
PRIVATE_PAGES_PATTERN = /^(Private_Clean|Private_Dirty|Private_Hugetlb):\s+(?<value>\d+)/
PSS_PATTERN = /^Pss:\s+(?<value>\d+)/
RSS_TOTAL_PATTERN = /^VmRSS:\s+(?<value>\d+)/
RSS_ANON_PATTERN = /^RssAnon:\s+(?<value>\d+)/
RSS_FILE_PATTERN = /^RssFile:\s+(?<value>\d+)/
MAX_OPEN_FILES_PATTERN = /Max open files\s*(?<value>\d+)/
MEM_TOTAL_PATTERN = /^MemTotal:\s+(?<value>\d+) (.+)/
def summary
proportional_mem = memory_usage_uss_pss
{
version: RUBY_DESCRIPTION,
gc_stat: GC.stat,
memory_rss: memory_usage_rss[:total],
memory_uss: proportional_mem[:uss],
memory_pss: proportional_mem[:pss],
time_cputime: cpu_time,
time_realtime: real_time,
time_monotonic: monotonic_time
}
end
# Returns the given process' RSS (resident set size) in bytes.
def memory_usage_rss(pid: 'self')
results = { total: 0, anon: 0, file: 0 }
safe_yield_procfile(PROC_STATUS_PATH % pid) do |io|
io.each_line do |line|
if (value = parse_metric_value(line, RSS_TOTAL_PATTERN)) > 0
results[:total] = value.kilobytes
elsif (value = parse_metric_value(line, RSS_ANON_PATTERN)) > 0
results[:anon] = value.kilobytes
elsif (value = parse_metric_value(line, RSS_FILE_PATTERN)) > 0
results[:file] = value.kilobytes
end
end
end
results
end
# Returns the given process' USS/PSS (unique/proportional set size) in bytes.
def memory_usage_uss_pss(pid: 'self')
sum_matches(PROC_SMAPS_ROLLUP_PATH % pid, uss: PRIVATE_PAGES_PATTERN, pss: PSS_PATTERN)
.transform_values(&:kilobytes)
end
def memory_total
sum_matches(PROC_MEM_INFO, memory_total: MEM_TOTAL_PATTERN)[:memory_total].kilobytes
end
def file_descriptor_count
Dir.glob(PROC_FD_GLOB).length
end
def max_open_file_descriptors
sum_matches(PROC_LIMITS_PATH, max_fds: MAX_OPEN_FILES_PATTERN)[:max_fds]
end
def cpu_time
Process.clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_second)
end
# Returns the current real time in a given precision.
#
# Returns the time as a Float for precision = :float_second.
def real_time(precision = :float_second)
Process.clock_gettime(Process::CLOCK_REALTIME, precision)
end
# Returns the current monotonic clock time as seconds with microseconds precision.
#
# Returns the time as a Float.
def monotonic_time
Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
end
def thread_cpu_time
# Not all OS kernels are supporting `Process::CLOCK_THREAD_CPUTIME_ID`
# Refer: https://gitlab.com/gitlab-org/gitlab/issues/30567#note_221765627
return unless defined?(Process::CLOCK_THREAD_CPUTIME_ID)
Process.clock_gettime(Process::CLOCK_THREAD_CPUTIME_ID, :float_second)
end
def thread_cpu_duration(start_time)
end_time = thread_cpu_time
return unless start_time && end_time
end_time - start_time
end
# Returns the total time the current process has been running in seconds.
def process_runtime_elapsed_seconds
# Entry 22 (1-indexed) contains the process `starttime`, see:
# https://man7.org/linux/man-pages/man5/proc.5.html
#
# This value is a fixed timestamp in clock ticks.
# To obtain an elapsed time in seconds, we divide by the number
# of ticks per second and subtract from the system uptime.
start_time_ticks = proc_stat_entries[21].to_f
clock_ticks_per_second = Etc.sysconf(Etc::SC_CLK_TCK)
uptime - (start_time_ticks / clock_ticks_per_second)
end
private
# Given a path to a file in /proc and a hash of (metric, pattern) pairs,
# sums up all values found for those patterns under the respective metric.
def sum_matches(proc_file, **patterns)
results = patterns.transform_values { 0 }
safe_yield_procfile(proc_file) do |io|
io.each_line do |line|
patterns.each do |metric, pattern|
results[metric] += parse_metric_value(line, pattern)
end
end
end
results
end
def parse_metric_value(line, pattern)
match = line.match(pattern)
return 0 unless match
match.named_captures.fetch('value', 0).to_i
end
def proc_stat_entries
safe_yield_procfile(PROC_STAT_PATH) do |io|
io.read.split(' ')
end || []
end
def safe_yield_procfile(path, &block)
File.open(path, &block)
rescue Errno::ENOENT
# This means the procfile we're reading from did not exist;
# most likely we're on Darwin.
end
# Equivalent to reading /proc/uptime on Linux 2.6+.
#
# Returns 0 if not supported, e.g. on Darwin.
def uptime
Process.clock_gettime(Process::CLOCK_BOOTTIME)
rescue NameError
0
end
extend Gitlab::Utils::System
end
end
end

View File

@ -157,7 +157,15 @@ module Gitlab
]
end
def send_url(url, allow_redirects: false, method: 'GET', body: nil, headers: nil)
# response_statuses can be set for 'error' and 'timeout'. They are optional.
# Their values must be a symbol accepted by Rack::Utils::SYMBOL_TO_STATUS_CODE.
# Example: response_statuses: { error: :internal_server_error, timeout: :bad_request }
# timeouts can be given for opening the connection and reading the response headers.
# Their values must be given in seconds.
# Example: timeouts: { open: 5, read: 5 }
def send_url(
url, allow_redirects: false, method: 'GET', body: nil, headers: nil, timeouts: {}, response_statuses: {}
)
params = {
'URL' => url,
'AllowRedirects' => allow_redirects,
@ -166,9 +174,24 @@ module Gitlab
'Method' => method
}.compact
if timeouts.present?
params['DialTimeout'] = "#{timeouts[:open]}s" if timeouts[:open]
params['ResponseHeaderTimeout'] = "#{timeouts[:read]}s" if timeouts[:read]
end
if response_statuses.present?
if response_statuses[:error]
params['ErrorResponseStatus'] = Rack::Utils::SYMBOL_TO_STATUS_CODE[response_statuses[:error]]
end
if response_statuses[:timeout]
params['TimeoutResponseStatus'] = Rack::Utils::SYMBOL_TO_STATUS_CODE[response_statuses[:timeout]]
end
end
[
SEND_DATA_HEADER,
"send-url:#{encode(params)}"
"send-url:#{encode(params.compact)}"
]
end
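A hypothetical caller sketch for the new send_url options (option names are taken from the diff; the enclosing class is assumed to be Gitlab::Workhorse, as in upstream GitLab):
header, value = Gitlab::Workhorse.send_url(
  'https://example.com/artifact.zip',
  timeouts: { open: 5, read: 10 },                                      # encoded as DialTimeout: "5s", ResponseHeaderTimeout: "10s"
  response_statuses: { error: :bad_gateway, timeout: :gateway_timeout } # mapped via Rack::Utils::SYMBOL_TO_STATUS_CODE
)
# header is SEND_DATA_HEADER; value is "send-url:" followed by the encoded params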

View File

@ -35294,6 +35294,9 @@ msgstr ""
msgid "Pipelines|Loading pipelines"
msgstr ""
msgid "Pipelines|Migrate to GitLab CI/CD from Jenkins"
msgstr ""
msgid "Pipelines|More Information"
msgstr ""
@ -35330,6 +35333,12 @@ msgstr ""
msgid "Pipelines|Something went wrong while cleaning runners cache."
msgstr ""
msgid "Pipelines|Start with a migration plan"
msgstr ""
msgid "Pipelines|Take advantage of simple, scalable pipelines and CI/CD-enabled features. You can view integration results, security scans, tests, code coverage and more directly in merge requests!"
msgstr ""
msgid "Pipelines|The %{namespace_name} namespace has %{percentage}%% or less Shared Runner Pipeline minutes remaining. After it runs out, no new jobs or pipelines in its projects will run."
msgstr ""

View File

@ -453,3 +453,21 @@ function download_local_gems() {
rm "${output}"
done
}
function define_trigger_branch_in_build_env() {
target_branch_name="${CI_MERGE_REQUEST_TARGET_BRANCH_NAME:-${CI_COMMIT_REF_NAME}}"
stable_branch_regex="^[0-9-]+-stable(-ee)?$"
echo "target_branch_name: ${target_branch_name}"
if [[ $target_branch_name =~ $stable_branch_regex ]]
then
export TRIGGER_BRANCH="${target_branch_name%-ee}"
else
export TRIGGER_BRANCH=master
fi
if [ -f "$BUILD_ENV" ]; then
echo "TRIGGER_BRANCH=${TRIGGER_BRANCH}" >> $BUILD_ENV
fi
}

View File

@ -1,10 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
require 'gitlab/rspec/next_instance_of'
require_relative '../../support/stub_settings_source'
require_relative '../../../sidekiq_cluster/cli'
require_relative '../../support/helpers/next_instance_of'
RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_settings_source: true do # rubocop:disable RSpec/FilePath
include NextInstanceOf

View File

@ -119,11 +119,7 @@ RSpec.describe Groups::DependencyProxyForContainersController, feature_category:
end
shared_examples 'authorize action with permission' do
context 'with a valid user' do
before do
group.add_guest(user)
end
shared_examples 'sends Workhorse instructions' do
it 'sends Workhorse local file instructions', :aggregate_failures do
subject
@ -144,6 +140,28 @@ RSpec.describe Groups::DependencyProxyForContainersController, feature_category:
expect(json_response['MaximumSize']).to eq(maximum_size)
end
end
before do
group.add_guest(user)
end
context 'with a valid user' do
it_behaves_like 'sends Workhorse instructions'
end
context 'with a valid group access token' do
let_it_be(:user) { create(:user, :project_bot) }
let_it_be_with_reload(:token) { create(:personal_access_token, user: user) }
it_behaves_like 'sends Workhorse instructions'
end
context 'with a deploy token' do
let_it_be(:user) { create(:deploy_token, :dependency_proxy_scopes, :group) }
let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
it_behaves_like 'sends Workhorse instructions'
end
end
shared_examples 'namespace statistics refresh' do

View File

@ -3,5 +3,9 @@
FactoryBot.define do
factory :ci_catalog_resource, class: 'Ci::Catalog::Resource' do
project factory: :project
trait :published do
state { :published }
end
end
end

View File

@ -29,7 +29,7 @@ RSpec.describe 'Global Catalog', :js, feature_category: :pipeline_composition do
let_it_be(:ci_catalog_resources) do
ci_resource_projects.map do |current_project|
create(:ci_catalog_resource, project: current_project)
create(:ci_catalog_resource, :published, project: current_project)
end
end
@ -118,7 +118,7 @@ RSpec.describe 'Global Catalog', :js, feature_category: :pipeline_composition do
end
context 'when the resource is published' do
let_it_be(:new_ci_resource) { create(:ci_catalog_resource, project: project, state: :published) }
let_it_be(:new_ci_resource) { create(:ci_catalog_resource, :published, project: project) }
it 'navigates to the details page' do
expect(page).to have_content('Go to the project')

View File

@ -803,15 +803,73 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
end
describe 'Empty State' do
let(:project) { create(:project, :repository) }
let_it_be_with_reload(:project) { create(:project, :repository) }
before do
visit project_pipelines_path(project)
wait_for_requests
end
it 'renders empty state' do
expect(page).to have_content 'Try test template'
end
it 'does not show Jenkins Migration Prompt' do
expect(page).not_to have_content _('Migrate to GitLab CI/CD from Jenkins')
end
end
describe 'Jenkins migration prompt' do
let_it_be_with_reload(:project) { create(:project, :repository) }
before do
allow_next_instance_of(Repository) do |instance|
allow(instance).to receive(:jenkinsfile?).and_return(true)
end
end
context 'when jenkinsfile is present' do
it 'shows Jenkins Migration Prompt' do
visit project_pipelines_path(project)
wait_for_requests
expect(page).to have_content _('Migrate to GitLab CI/CD from Jenkins')
expect(page).to have_content _('Start with a migration plan')
end
end
context 'when gitlab ci file is present' do
before do
allow_next_instance_of(Repository) do |instance|
allow(instance).to receive(:gitlab_ci_yml).and_return(true)
end
end
it 'does not show migration prompt' do
expect_not_to_show_prompt(project)
end
end
context 'when AutoDevops is enabled' do
before do
project.update!(auto_devops_attributes: { enabled: true })
end
it 'does not show migration prompt' do
expect_not_to_show_prompt(project)
end
end
def expect_not_to_show_prompt(project)
visit project_pipelines_path(project)
wait_for_requests
expect(page).not_to have_content _('Migrate to GitLab CI/CD from Jenkins')
expect(page).not_to have_content _('Start with a migration plan')
end
end
end

View File

@ -22,10 +22,10 @@
"type": "integer"
},
"creation_timestamp": {
"type": "string"
"type": "integer"
},
"last_updated_timestamp": {
"type": "string"
"type": "integer"
},
"tags": {
"type": "array",

View File

@ -8,6 +8,7 @@ import SkeletonLoadingContainer from '~/vue_shared/components/notes/skeleton_not
import abuseReportNotesQuery from '~/admin/abuse_report/graphql/notes/abuse_report_notes.query.graphql';
import AbuseReportNotes from '~/admin/abuse_report/components/abuse_report_notes.vue';
import AbuseReportDiscussion from '~/admin/abuse_report/components/notes/abuse_report_discussion.vue';
import AbuseReportAddNote from '~/admin/abuse_report/components/notes/abuse_report_add_note.vue';
import { mockAbuseReport, mockNotesByIdResponse } from '../mock_data';
@ -24,6 +25,7 @@ describe('Abuse Report Notes', () => {
const findSkeletonLoaders = () => wrapper.findAllComponents(SkeletonLoadingContainer);
const findAbuseReportDiscussions = () => wrapper.findAllComponents(AbuseReportDiscussion);
const findAbuseReportAddNote = () => wrapper.findComponent(AbuseReportAddNote);
const createComponent = ({
queryHandler = notesQueryHandler,
@ -78,6 +80,16 @@ describe('Abuse Report Notes', () => {
discussion: discussions[1].notes.nodes,
});
});
it('should show the comment form', () => {
expect(findAbuseReportAddNote().exists()).toBe(true);
expect(findAbuseReportAddNote().props()).toMatchObject({
abuseReportId: mockAbuseReportId,
discussionId: '',
isNewDiscussion: true,
});
});
});
describe('When there is an error fetching the notes', () => {

View File

@ -0,0 +1,208 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createAlert } from '~/alert';
import { clearDraft } from '~/lib/utils/autosave';
import waitForPromises from 'helpers/wait_for_promises';
import createNoteMutation from '~/admin/abuse_report/graphql/notes/create_abuse_report_note.mutation.graphql';
import AbuseReportAddNote from '~/admin/abuse_report/components/notes/abuse_report_add_note.vue';
import AbuseReportCommentForm from '~/admin/abuse_report/components/notes/abuse_report_comment_form.vue';
import { mockAbuseReport, createAbuseReportNoteResponse } from '../../mock_data';
jest.mock('~/alert');
jest.mock('~/lib/utils/autosave');
Vue.use(VueApollo);
describe('Abuse Report Add Note', () => {
let wrapper;
const mockAbuseReportId = mockAbuseReport.report.globalId;
const mutationSuccessHandler = jest.fn().mockResolvedValue(createAbuseReportNoteResponse);
const findTimelineEntry = () => wrapper.findByTestId('abuse-report-note-timeline-entry');
const findTimelineEntryInner = () =>
wrapper.findByTestId('abuse-report-note-timeline-entry-inner');
const findCommentFormWrapper = () => wrapper.findByTestId('abuse-report-comment-form-wrapper');
const findAbuseReportCommentForm = () => wrapper.findComponent(AbuseReportCommentForm);
const findReplyTextarea = () => wrapper.findByTestId('abuse-report-note-reply-textarea');
const createComponent = ({
mutationHandler = mutationSuccessHandler,
abuseReportId = mockAbuseReportId,
discussionId = '',
isNewDiscussion = true,
} = {}) => {
wrapper = shallowMountExtended(AbuseReportAddNote, {
apolloProvider: createMockApollo([[createNoteMutation, mutationHandler]]),
propsData: {
abuseReportId,
discussionId,
isNewDiscussion,
},
});
};
describe('Default', () => {
beforeEach(() => {
createComponent();
});
it('should show the comment form', () => {
expect(findAbuseReportCommentForm().exists()).toBe(true);
expect(findAbuseReportCommentForm().props()).toMatchObject({
abuseReportId: mockAbuseReportId,
isSubmitting: false,
autosaveKey: `${mockAbuseReportId}-comment`,
isNewDiscussion: true,
initialValue: '',
});
});
it('should not show the reply textarea', () => {
expect(findReplyTextarea().exists()).toBe(false);
});
it('should add the correct classList to timeline-entry', () => {
expect(findTimelineEntry().classes()).toEqual(
expect.arrayContaining(['timeline-entry', 'note-form']),
);
expect(findTimelineEntryInner().classes()).toEqual(['timeline-entry-inner']);
});
});
describe('When the main comment has replies', () => {
beforeEach(() => {
createComponent({
discussionId: 'gid://gitlab/Discussion/9c7228e06fb0339a3d1440fcda960acfd8baa43a',
isNewDiscussion: false,
});
});
it('should add the correct classLists', () => {
expect(findTimelineEntry().classes()).toEqual(
expect.arrayContaining([
'note',
'note-wrapper',
'note-comment',
'discussion-reply-holder',
'gl-border-t-0!',
'clearfix',
]),
);
expect(findTimelineEntryInner().classes()).toEqual([]);
expect(findCommentFormWrapper().classes()).toEqual(
expect.arrayContaining([
'gl-relative',
'gl-display-flex',
'gl-align-items-flex-start',
'gl-flex-nowrap',
]),
);
});
it('should not show the comment form', () => {
expect(findAbuseReportCommentForm().exists()).toBe(false);
});
it('should show the reply textarea', () => {
expect(findReplyTextarea().exists()).toBe(true);
expect(findReplyTextarea().attributes()).toMatchObject({
rows: '1',
placeholder: 'Reply',
'aria-label': 'Reply to comment',
});
});
});
describe('Adding a comment', () => {
const noteText = 'mock note';
beforeEach(() => {
createComponent();
findAbuseReportCommentForm().vm.$emit('submitForm', {
commentText: noteText,
});
});
it('should call the mutation with provided noteText', async () => {
expect(findAbuseReportCommentForm().props('isSubmitting')).toBe(true);
expect(mutationSuccessHandler).toHaveBeenCalledWith({
input: {
noteableId: mockAbuseReportId,
body: noteText,
discussionId: null,
},
});
await waitForPromises();
expect(findAbuseReportCommentForm().props('isSubmitting')).toBe(false);
});
it('should add the correct classList to comment-form wrapper', () => {
expect(findCommentFormWrapper().classes()).toEqual([]);
});
it('should clear draft from local storage', async () => {
await waitForPromises();
expect(clearDraft).toHaveBeenCalledWith(`${mockAbuseReportId}-comment`);
});
it('should emit `cancelEditing` event', async () => {
await waitForPromises();
expect(wrapper.emitted('cancelEditing')).toHaveLength(1);
});
it.each`
description | errorResponse
${'with an error response'} | ${new Error('The discussion could not be found')}
${'without an error response'} | ${null}
`('should show an error when mutation fails $description', async ({ errorResponse }) => {
createComponent({
mutationHandler: jest.fn().mockRejectedValue(errorResponse),
});
findAbuseReportCommentForm().vm.$emit('submitForm', {
commentText: noteText,
});
await waitForPromises();
const errorMessage = errorResponse
? 'Your comment could not be submitted because the discussion could not be found.'
: 'Your comment could not be submitted! Please check your network connection and try again.';
expect(createAlert).toHaveBeenCalledWith({
message: errorMessage,
captureError: true,
parent: expect.anything(),
});
});
});
describe('Replying to a comment', () => {
beforeEach(() => {
createComponent({
discussionId: 'gid://gitlab/Discussion/9c7228e06fb0339a3d1440fcda960acfd8baa43a',
isNewDiscussion: false,
});
});
it('should show comment form when reply textarea is clicked on', async () => {
await findReplyTextarea().trigger('click');
expect(findAbuseReportCommentForm().exists()).toBe(true);
});
});
});

View File

@ -0,0 +1,214 @@
import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import { ESC_KEY, ENTER_KEY } from '~/lib/utils/keys';
import * as autosave from '~/lib/utils/autosave';
import * as confirmViaGlModal from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import AbuseReportCommentForm from '~/admin/abuse_report/components/notes/abuse_report_comment_form.vue';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
import { mockAbuseReport } from '../../mock_data';
jest.mock('~/lib/utils/autosave', () => ({
updateDraft: jest.fn(),
clearDraft: jest.fn(),
getDraft: jest.fn().mockReturnValue(''),
}));
jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal', () => ({
confirmAction: jest.fn().mockResolvedValue(true),
}));
describe('Abuse Report Comment Form', () => {
let wrapper;
const mockAbuseReportId = mockAbuseReport.report.globalId;
const mockAutosaveKey = `${mockAbuseReportId}-comment`;
const mockInitialValue = 'note text';
const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]');
const findCommentButton = () => wrapper.find('[data-testid="comment-button"]');
const createComponent = ({
abuseReportId = mockAbuseReportId,
isSubmitting = false,
initialValue = mockInitialValue,
autosaveKey = mockAutosaveKey,
isNewDiscussion = true,
} = {}) => {
wrapper = shallowMount(AbuseReportCommentForm, {
propsData: {
abuseReportId,
isSubmitting,
initialValue,
autosaveKey,
isNewDiscussion,
},
provide: {
uploadNoteAttachmentPath: 'test-upload-path',
},
});
};
describe('Markdown editor', () => {
it('should show markdown editor', () => {
createComponent();
expect(findMarkdownEditor().exists()).toBe(true);
expect(findMarkdownEditor().props()).toMatchObject({
value: mockInitialValue,
renderMarkdownPath: '',
uploadsPath: 'test-upload-path',
enableContentEditor: false,
formFieldProps: {
'aria-label': 'Add a reply',
placeholder: 'Write a comment or drag your files here…',
id: 'abuse-report-add-or-edit-comment',
name: 'abuse-report-add-or-edit-comment',
},
markdownDocsPath: '/help/user/markdown',
});
});
it('should pass the draft from local storage if it exists', () => {
jest.spyOn(autosave, 'getDraft').mockImplementation(() => 'draft comment');
createComponent();
expect(findMarkdownEditor().props('value')).toBe('draft comment');
});
it('should pass an empty string if both draft & initialValue are empty', () => {
jest.spyOn(autosave, 'getDraft').mockImplementation(() => '');
createComponent({ initialValue: '' });
expect(findMarkdownEditor().props('value')).toBe('');
});
});
describe('Markdown Editor input', () => {
beforeEach(() => {
createComponent();
});
it('should set the correct comment text value', async () => {
findMarkdownEditor().vm.$emit('input', 'new comment');
await nextTick();
expect(findMarkdownEditor().props('value')).toBe('new comment');
});
it('should call `updateDraft` with correct parameters', () => {
findMarkdownEditor().vm.$emit('input', 'new comment');
expect(autosave.updateDraft).toHaveBeenCalledWith(mockAutosaveKey, 'new comment');
});
});
describe('Submitting a comment', () => {
beforeEach(() => {
jest.spyOn(autosave, 'getDraft').mockImplementation(() => 'draft comment');
createComponent();
});
it('should show comment button', () => {
expect(findCommentButton().exists()).toBe(true);
expect(findCommentButton().text()).toBe('Comment');
});
it('should show `Reply` button if it is not a new discussion', () => {
createComponent({ isNewDiscussion: false });
expect(findCommentButton().text()).toBe('Reply');
});
describe('when enter with meta key is pressed', () => {
beforeEach(() => {
findMarkdownEditor().vm.$emit(
'keydown',
new KeyboardEvent('keydown', { key: ENTER_KEY, metaKey: true }),
);
});
it('should emit `submitForm` event with correct parameters', () => {
expect(wrapper.emitted('submitForm')).toEqual([[{ commentText: 'draft comment' }]]);
});
});
describe('when ctrl+enter is pressed', () => {
beforeEach(() => {
findMarkdownEditor().vm.$emit(
'keydown',
new KeyboardEvent('keydown', { key: ENTER_KEY, ctrlKey: true }),
);
});
it('should emit `submitForm` event with correct parameters', () => {
expect(wrapper.emitted('submitForm')).toEqual([[{ commentText: 'draft comment' }]]);
});
});
describe('when comment button is clicked', () => {
beforeEach(() => {
findCommentButton().vm.$emit('click');
});
it('should emit `submitForm` event with correct parameters', () => {
expect(wrapper.emitted('submitForm')).toEqual([[{ commentText: 'draft comment' }]]);
});
});
});
describe('Cancel editing', () => {
beforeEach(() => {
jest.spyOn(autosave, 'getDraft').mockImplementation(() => 'draft comment');
createComponent();
});
it('should show cancel button', () => {
expect(findCancelButton().exists()).toBe(true);
expect(findCancelButton().text()).toBe('Cancel');
});
describe('when escape key is pressed', () => {
beforeEach(() => {
findMarkdownEditor().vm.$emit('keydown', new KeyboardEvent('keydown', { key: ESC_KEY }));
return waitForPromises();
});
it('should confirm a user action if comment text is not empty', () => {
expect(confirmViaGlModal.confirmAction).toHaveBeenCalled();
});
it('should clear draft from local storage', () => {
expect(autosave.clearDraft).toHaveBeenCalledWith(mockAutosaveKey);
});
it('should emit `cancelEditing` event', () => {
expect(wrapper.emitted('cancelEditing')).toHaveLength(1);
});
});
describe('when cancel button is clicked', () => {
beforeEach(() => {
findCancelButton().vm.$emit('click');
return waitForPromises();
});
it('should confirm a user action if comment text is not empty', () => {
expect(confirmViaGlModal.confirmAction).toHaveBeenCalled();
});
it('should clear draft from local storage', () => {
expect(autosave.clearDraft).toHaveBeenCalledWith(mockAutosaveKey);
});
it('should emit `cancelEditing` event', () => {
expect(wrapper.emitted('cancelEditing')).toHaveLength(1);
});
});
});
});

View File

@ -4,6 +4,7 @@ import ToggleRepliesWidget from '~/notes/components/toggle_replies_widget.vue';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import AbuseReportDiscussion from '~/admin/abuse_report/components/notes/abuse_report_discussion.vue';
import AbuseReportNote from '~/admin/abuse_report/components/notes/abuse_report_note.vue';
import AbuseReportAddNote from '~/admin/abuse_report/components/notes/abuse_report_add_note.vue';
import {
mockAbuseReport,
@ -19,6 +20,7 @@ describe('Abuse Report Discussion', () => {
const findAbuseReportNotes = () => wrapper.findAllComponents(AbuseReportNote);
const findTimelineEntryItem = () => wrapper.findComponent(TimelineEntryItem);
const findToggleRepliesWidget = () => wrapper.findComponent(ToggleRepliesWidget);
const findAbuseReportAddNote = () => wrapper.findComponent(AbuseReportAddNote);
const createComponent = ({
discussion = mockDiscussionWithNoReplies,
@ -50,9 +52,13 @@ describe('Abuse Report Discussion', () => {
expect(findTimelineEntryItem().exists()).toBe(false);
});
it('should not show the the toggle replies widget wrapper when no replies', () => {
it('should not show the toggle replies widget wrapper when there are no replies', () => {
expect(findToggleRepliesWidget().exists()).toBe(false);
});
it('should not show the comment form when there are no replies', () => {
expect(findAbuseReportAddNote().exists()).toBe(false);
});
});
describe('When the main comment has replies', () => {
@ -75,5 +81,15 @@ describe('Abuse Report Discussion', () => {
await nextTick();
expect(findAbuseReportNotes()).toHaveLength(1);
});
it('should show the comment form', () => {
expect(findAbuseReportAddNote().exists()).toBe(true);
expect(findAbuseReportAddNote().props()).toMatchObject({
abuseReportId: mockAbuseReportId,
discussionId: mockDiscussionWithReplies[0].discussion.id,
isNewDiscussion: false,
});
});
});
});

View File

@ -340,3 +340,52 @@ export const mockNotesByIdResponse = {
},
},
};
export const createAbuseReportNoteResponse = {
data: {
createNote: {
note: {
id: 'gid://gitlab/Note/6',
discussion: {
id: 'gid://gitlab/Discussion/90ca230051611e6e1676c50ba7178e0baeabd98d',
notes: {
nodes: [
{
id: 'gid://gitlab/Note/6',
body: 'Another comment',
bodyHtml: '<p data-sourcepos="1:1-1:15" dir="auto">Another comment</p>',
createdAt: '2023-11-02T02:45:46Z',
lastEditedAt: '2023-11-02T02:45:46Z',
url: 'http://127.0.0.1:3000/admin/abuse_reports/20#note_6',
resolved: false,
author: {
id: 'gid://gitlab/User/1',
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
name: 'Administrator',
username: 'root',
webUrl: 'http://127.0.0.1:3000/root',
},
lastEditedBy: null,
userPermissions: {
adminNote: true,
},
discussion: {
id: 'gid://gitlab/Discussion/90ca230051611e6e1676c50ba7178e0baeabd98d',
notes: {
nodes: [
{
id: 'gid://gitlab/Note/6',
},
],
},
},
},
],
},
},
},
errors: [],
},
},
};

View File

@ -141,7 +141,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('without a selected stage', () => {
it('will select the first stage from the value stream', () => {
const [firstStage] = allowedStages;
testAction({
return testAction({
action: actions.setInitialStage,
state,
payload: null,
@ -154,7 +154,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('with no value stream stages available', () => {
it('will return SET_NO_ACCESS_ERROR', () => {
state = { ...state, stages: [] };
testAction({
return testAction({
action: actions.setInitialStage,
state,
payload: null,
@ -299,25 +299,23 @@ describe('Project Value Stream Analytics actions', () => {
name: 'mock default',
};
const mockValueStreams = [mockValueStream, selectedValueStream];
it('with data, will set the first value stream', () => {
it('with data, will set the first value stream', () =>
testAction({
action: actions.receiveValueStreamsSuccess,
state,
payload: mockValueStreams,
expectedMutations: [{ type: 'RECEIVE_VALUE_STREAMS_SUCCESS', payload: mockValueStreams }],
expectedActions: [{ type: 'setSelectedValueStream', payload: mockValueStream }],
});
});
}));
it('without data, will set the default value stream', () => {
it('without data, will set the default value stream', () =>
testAction({
action: actions.receiveValueStreamsSuccess,
state,
payload: [],
expectedMutations: [{ type: 'RECEIVE_VALUE_STREAMS_SUCCESS', payload: [] }],
expectedActions: [{ type: 'setSelectedValueStream', payload: selectedValueStream }],
});
});
}));
});
describe('fetchValueStreamStages', () => {

View File

@ -165,7 +165,7 @@ describe('setFilters', () => {
issuableType: TYPE_ISSUE,
};
testAction(
return testAction(
actions.setFilters,
filters,
state,
@ -441,7 +441,7 @@ describe('fetchMilestones', () => {
describe('createList', () => {
it('should dispatch createIssueList action', () => {
testAction({
return testAction({
action: actions.createList,
payload: { backlog: true },
expectedActions: [{ type: 'createIssueList', payload: { backlog: true } }],
@ -560,7 +560,7 @@ describe('addList', () => {
};
it('should commit RECEIVE_ADD_LIST_SUCCESS mutation and dispatch fetchItemsForList action', () => {
testAction({
return testAction({
action: actions.addList,
payload: mockLists[1],
state: { ...getters },
@ -1007,7 +1007,7 @@ describe('moveItem', () => {
it('should dispatch moveIssue action with payload', () => {
const payload = { mock: 'payload' };
testAction({
return testAction({
action: actions.moveItem,
payload,
expectedActions: [{ type: 'moveIssue', payload }],
@ -1017,7 +1017,7 @@ describe('moveItem', () => {
describe('moveIssue', () => {
it('should dispatch a correct set of actions', () => {
testAction({
return testAction({
action: actions.moveIssue,
payload: mockMoveIssueParams,
state: mockMoveState,
@ -1092,7 +1092,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('moveIssueCard commits a correct set of actions', () => {
testAction({
return testAction({
action: actions.moveIssueCard,
state,
payload: getMoveData(state, params),
@ -1101,7 +1101,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('undoMoveIssueCard commits a correct set of actions', () => {
testAction({
return testAction({
action: actions.undoMoveIssueCard,
state,
payload: getMoveData(state, params),
@ -1169,7 +1169,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('moveIssueCard commits a correct set of actions', () => {
testAction({
return testAction({
action: actions.moveIssueCard,
state,
payload: getMoveData(state, params),
@ -1178,7 +1178,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('undoMoveIssueCard commits a correct set of actions', () => {
testAction({
return testAction({
action: actions.undoMoveIssueCard,
state,
payload: getMoveData(state, params),
@ -1244,7 +1244,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('moveIssueCard commits a correct set of actions', () => {
testAction({
return testAction({
action: actions.moveIssueCard,
state,
payload: getMoveData(state, params),
@ -1253,7 +1253,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('undoMoveIssueCard commits a correct set of actions', () => {
testAction({
return testAction({
action: actions.undoMoveIssueCard,
state,
payload: getMoveData(state, params),
@ -1298,7 +1298,7 @@ describe('updateMovedIssueCard', () => {
])(
'should commit UPDATE_BOARD_ITEM with a correctly updated issue data when %s',
(_, { state, moveData, updatedIssue }) => {
testAction({
return testAction({
action: actions.updateMovedIssue,
payload: moveData,
state,
@ -1363,7 +1363,7 @@ describe('updateIssueOrder', () => {
},
});
testAction(
return testAction(
actions.updateIssueOrder,
{ moveData },
state,
@ -1395,7 +1395,7 @@ describe('updateIssueOrder', () => {
},
});
testAction(
return testAction(
actions.updateIssueOrder,
{ moveData },
state,
@ -1448,7 +1448,7 @@ describe('addListItem', () => {
inProgress: true,
};
testAction(
return testAction(
actions.addListItem,
payload,
{},
@ -1475,7 +1475,7 @@ describe('addListItem', () => {
position: 0,
};
testAction(
return testAction(
actions.addListItem,
payload,
{},
@ -1503,7 +1503,7 @@ describe('removeListItem', () => {
itemId: mockIssue.id,
};
testAction(actions.removeListItem, payload, {}, [
return testAction(actions.removeListItem, payload, {}, [
{ type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload },
{ type: types.REMOVE_BOARD_ITEM, payload: mockIssue.id },
]);
@ -1608,7 +1608,7 @@ describe('addListNewIssue', () => {
},
});
testAction({
return testAction({
action: actions.addListNewIssue,
payload: {
issueInput: mockIssue,
@ -1651,7 +1651,7 @@ describe('addListNewIssue', () => {
},
});
testAction({
return testAction({
action: actions.addListNewIssue,
payload: {
issueInput: mockIssue,
@ -1700,7 +1700,7 @@ describe('setActiveIssueLabels', () => {
value: labels,
};
testAction(
return testAction(
actions.setActiveIssueLabels,
input,
{ ...state, ...getters },
@ -1721,7 +1721,7 @@ describe('setActiveIssueLabels', () => {
value: [labels[1]],
};
testAction(
return testAction(
actions.setActiveIssueLabels,
{ ...input, removeLabelIds: [getIdFromGraphQLId(labels[0].id)] },
{ ...state, ...getters },
@ -1962,7 +1962,7 @@ describe('toggleBoardItemMultiSelection', () => {
const boardItem2 = mockIssue2;
it('should commit mutation ADD_BOARD_ITEM_TO_SELECTION if item is not on selection state', () => {
testAction(
return testAction(
actions.toggleBoardItemMultiSelection,
boardItem,
{ selectedBoardItems: [] },
@ -1977,7 +1977,7 @@ describe('toggleBoardItemMultiSelection', () => {
});
it('should commit mutation REMOVE_BOARD_ITEM_FROM_SELECTION if item is on selection state', () => {
testAction(
return testAction(
actions.toggleBoardItemMultiSelection,
boardItem,
{ selectedBoardItems: [mockIssue] },
@ -1992,7 +1992,7 @@ describe('toggleBoardItemMultiSelection', () => {
});
it('should additionally commit mutation ADD_BOARD_ITEM_TO_SELECTION for active issue and dispatch unsetActiveId', () => {
testAction(
return testAction(
actions.toggleBoardItemMultiSelection,
boardItem2,
{ activeId: mockActiveIssue.id, activeBoardItem: mockActiveIssue, selectedBoardItems: [] },
@ -2013,7 +2013,7 @@ describe('toggleBoardItemMultiSelection', () => {
describe('resetBoardItemMultiSelection', () => {
it('should commit mutation RESET_BOARD_ITEM_SELECTION', () => {
testAction({
return testAction({
action: actions.resetBoardItemMultiSelection,
state: { selectedBoardItems: [mockIssue] },
expectedMutations: [
@ -2027,7 +2027,7 @@ describe('resetBoardItemMultiSelection', () => {
describe('toggleBoardItem', () => {
it('should dispatch resetBoardItemMultiSelection and unsetActiveId when boardItem is the active item', () => {
testAction({
return testAction({
action: actions.toggleBoardItem,
payload: { boardItem: mockIssue },
state: {
@ -2038,7 +2038,7 @@ describe('toggleBoardItem', () => {
});
it('should dispatch resetBoardItemMultiSelection and setActiveId when boardItem is not the active item', () => {
testAction({
return testAction({
action: actions.toggleBoardItem,
payload: { boardItem: mockIssue },
state: {
@ -2054,7 +2054,7 @@ describe('toggleBoardItem', () => {
describe('setError', () => {
it('should commit mutation SET_ERROR', () => {
testAction({
return testAction({
action: actions.setError,
payload: { message: 'mayday' },
expectedMutations: [
@ -2085,7 +2085,7 @@ describe('setError', () => {
describe('unsetError', () => {
it('should commit mutation SET_ERROR with undefined as payload', () => {
testAction({
return testAction({
action: actions.unsetError,
expectedMutations: [
{

View File

@ -1,4 +1,5 @@
import '~/commons';
import { GlButton } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import PipelinesCiTemplates from '~/ci/pipelines_page/components/empty_state/pipelines_ci_templates.vue';
@ -14,12 +15,15 @@ describe('Pipelines CI Templates', () => {
return shallowMountExtended(PipelinesCiTemplates, {
provide: {
pipelineEditorPath,
showJenkinsCiPrompt: false,
...propsData,
},
stubs,
});
};
const findMigrateFromJenkinsPrompt = () => wrapper.findByTestId('migrate-from-jenkins-prompt');
const findMigrationPlanBtn = () => findMigrateFromJenkinsPrompt().findComponent(GlButton);
const findTestTemplateLink = () => wrapper.findByTestId('test-template-link');
const findCiTemplates = () => wrapper.findComponent(CiTemplates);
@ -34,6 +38,27 @@ describe('Pipelines CI Templates', () => {
);
expect(findCiTemplates().exists()).toBe(true);
});
it('does not show migrate from jenkins prompt', () => {
expect(findMigrateFromJenkinsPrompt().exists()).toBe(false);
});
describe('when Jenkinsfile is detected', () => {
beforeEach(() => {
wrapper = createWrapper({ showJenkinsCiPrompt: true });
});
it('shows migrate from jenkins prompt', () => {
expect(findMigrateFromJenkinsPrompt().exists()).toBe(true);
});
it('opens correct link in new tab after clicking migration plan CTA', () => {
expect(findMigrationPlanBtn().attributes('href')).toBe(
'/help/ci/migration/plan_a_migration',
);
expect(findMigrationPlanBtn().attributes('target')).toBe('_blank');
});
});
});
describe('tracking', () => {
@ -54,5 +79,27 @@ describe('Pipelines CI Templates', () => {
label: 'Getting-Started',
});
});
describe('when Jenkinsfile detected', () => {
beforeEach(() => {
wrapper = createWrapper({ showJenkinsCiPrompt: true });
});
it('creates render event on page load', () => {
expect(trackingSpy).toHaveBeenCalledTimes(1);
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'render', {
label: 'migrate_from_jenkins_prompt',
});
});
it('sends an event when migration plan is clicked', () => {
findMigrationPlanBtn().vm.$emit('click');
expect(trackingSpy).toHaveBeenCalledTimes(2);
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'template_clicked', {
label: 'migrate_from_jenkins_prompt',
});
});
});
});
});

View File

@ -110,6 +110,7 @@ describe('Pipelines', () => {
suggestedCiTemplates: [],
ciRunnerSettingsPath: defaultProps.ciRunnerSettingsPath,
anyRunnersAvailable: true,
showJenkinsCiPrompt: false,
},
propsData: {
...defaultProps,

View File

@ -36,7 +36,7 @@ describe('deploy freeze store actions', () => {
describe('setSelectedFreezePeriod', () => {
it('commits SET_SELECTED_TIMEZONE mutation', () => {
testAction(
return testAction(
actions.setFreezePeriod,
{
id: 3,
@ -69,7 +69,7 @@ describe('deploy freeze store actions', () => {
describe('setSelectedTimezone', () => {
it('commits SET_SELECTED_TIMEZONE mutation', () => {
testAction(actions.setSelectedTimezone, {}, {}, [
return testAction(actions.setSelectedTimezone, {}, {}, [
{
payload: {},
type: types.SET_SELECTED_TIMEZONE,
@ -80,7 +80,7 @@ describe('deploy freeze store actions', () => {
describe('setFreezeStartCron', () => {
it('commits SET_FREEZE_START_CRON mutation', () => {
testAction(actions.setFreezeStartCron, {}, {}, [
return testAction(actions.setFreezeStartCron, {}, {}, [
{
type: types.SET_FREEZE_START_CRON,
},
@ -90,7 +90,7 @@ describe('deploy freeze store actions', () => {
describe('setFreezeEndCron', () => {
it('commits SET_FREEZE_END_CRON mutation', () => {
testAction(actions.setFreezeEndCron, {}, {}, [
return testAction(actions.setFreezeEndCron, {}, {}, [
{
type: types.SET_FREEZE_END_CRON,
},

View File

@ -631,7 +631,7 @@ describe('DiffsStoreActions', () => {
describe('prefetchFileNeighbors', () => {
it('dispatches two requests to prefetch the next/previous files', () => {
testAction(
return testAction(
diffActions.prefetchFileNeighbors,
{},
{

View File

@ -57,7 +57,7 @@ describe('error tracking actions', () => {
describe('restartPolling', () => {
it('should restart polling', () => {
testAction(
return testAction(
actions.restartPolling,
{},
{},
@ -74,7 +74,7 @@ describe('error tracking actions', () => {
it('should search by query', () => {
const query = 'search';
testAction(
return testAction(
actions.searchByQuery,
query,
{},
@ -92,7 +92,7 @@ describe('error tracking actions', () => {
it('should search errors by status', () => {
const status = 'ignored';
testAction(
return testAction(
actions.filterByStatus,
status,
{},
@ -106,7 +106,7 @@ describe('error tracking actions', () => {
it('should search by query', () => {
const field = 'frequency';
testAction(
return testAction(
actions.sortByField,
field,
{},
@ -123,7 +123,7 @@ describe('error tracking actions', () => {
it('should set search endpoint', () => {
const endpoint = 'https://sentry.io';
testAction(
return testAction(
actions.setEndpoint,
{ endpoint },
{},
@ -136,7 +136,7 @@ describe('error tracking actions', () => {
describe('fetchPaginatedResults', () => {
it('should start polling the selected page cursor', () => {
const cursor = '1576637570000:1:1';
testAction(
return testAction(
actions.fetchPaginatedResults,
cursor,
{},

View File

@ -1,107 +0,0 @@
import htmlOpenIssue from 'test_fixtures/issues/open-issue.html';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import initTodoToggle, { initNavUserDropdownTracking } from '~/header';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
// TODO: Remove this with the removal of the old navigation.
// See https://gitlab.com/groups/gitlab-org/-/epics/11875.
//
// This and ~/header will be removed. These tests no longer work due to the
// corresponding fixtures changing for
// https://gitlab.com/gitlab-org/gitlab/-/issues/420121.
// eslint-disable-next-line jest/no-disabled-tests
describe.skip('Header', () => {
describe('Todos notification', () => {
const todosPendingCount = '.js-todos-count';
function isTodosCountHidden() {
return document.querySelector(todosPendingCount).classList.contains('hidden');
}
function triggerToggle(newCount) {
const event = new CustomEvent('todo:toggle', {
detail: {
count: newCount,
},
});
document.dispatchEvent(event);
}
beforeEach(() => {
initTodoToggle();
setHTMLFixture(htmlOpenIssue);
});
afterEach(() => {
resetHTMLFixture();
});
it('should update todos-count after receiving the todo:toggle event', () => {
triggerToggle(5);
expect(document.querySelector(todosPendingCount).textContent).toEqual('5');
});
it('should hide todos-count when it is 0', () => {
triggerToggle(0);
expect(isTodosCountHidden()).toEqual(true);
});
it('should show todos-count when it is more than 0', () => {
triggerToggle(10);
expect(isTodosCountHidden()).toEqual(false);
});
describe('when todos-count is 1000', () => {
beforeEach(() => {
triggerToggle(1000);
});
it('should show todos-count', () => {
expect(isTodosCountHidden()).toEqual(false);
});
it('should show 99+ for todos-count', () => {
expect(document.querySelector(todosPendingCount).textContent).toEqual('99+');
});
});
});
describe('Track user dropdown open', () => {
let trackingSpy;
beforeEach(() => {
setHTMLFixture(`
<li class="js-nav-user-dropdown">
<a class="js-buy-pipeline-minutes-link" data-track-action="click_buy_ci_minutes" data-track-label="free" data-track-property="user_dropdown">Buy Pipeline minutes</a>
</li>`);
trackingSpy = mockTracking(
'_category_',
document.querySelector('.js-nav-user-dropdown').element,
jest.spyOn,
);
document.body.dataset.page = 'some:page';
initNavUserDropdownTracking();
});
afterEach(() => {
unmockTracking();
resetHTMLFixture();
});
it('sends a tracking event when the dropdown is opened and contains Buy Pipeline minutes link', () => {
const event = new CustomEvent('shown.bs.dropdown');
document.querySelector('.js-nav-user-dropdown').dispatchEvent(event);
expect(trackingSpy).toHaveBeenCalledWith('some:page', 'show_buy_ci_minutes', {
label: 'free',
property: 'user_dropdown',
});
});
});
});

View File

@ -8,7 +8,7 @@ describe('~/ide/stores/modules/editor/actions', () => {
it('commits with payload', () => {
const payload = {};
testAction(actions.updateFileEditor, payload, {}, [
return testAction(actions.updateFileEditor, payload, {}, [
{ type: types.UPDATE_FILE_EDITOR, payload },
]);
});
@ -18,7 +18,7 @@ describe('~/ide/stores/modules/editor/actions', () => {
it('commits with payload', () => {
const payload = 'path/to/file.txt';
testAction(actions.removeFileEditor, payload, {}, [
return testAction(actions.removeFileEditor, payload, {}, [
{ type: types.REMOVE_FILE_EDITOR, payload },
]);
});
@ -28,7 +28,7 @@ describe('~/ide/stores/modules/editor/actions', () => {
it('commits with payload', () => {
const payload = createTriggerRenamePayload('test', 'test123');
testAction(actions.renameFileEditor, payload, {}, [
return testAction(actions.renameFileEditor, payload, {}, [
{ type: types.RENAME_FILE_EDITOR, payload },
]);
});

View File

@ -142,7 +142,7 @@ describe('Vuex members actions', () => {
describe('showRemoveGroupLinkModal', () => {
it(`commits ${types.SHOW_REMOVE_GROUP_LINK_MODAL} mutation`, () => {
testAction(showRemoveGroupLinkModal, group, state, [
return testAction(showRemoveGroupLinkModal, group, state, [
{
type: types.SHOW_REMOVE_GROUP_LINK_MODAL,
payload: group,
@ -153,7 +153,7 @@ describe('Vuex members actions', () => {
describe('hideRemoveGroupLinkModal', () => {
it(`commits ${types.HIDE_REMOVE_GROUP_LINK_MODAL} mutation`, () => {
testAction(hideRemoveGroupLinkModal, group, state, [
return testAction(hideRemoveGroupLinkModal, group, state, [
{
type: types.HIDE_REMOVE_GROUP_LINK_MODAL,
},
@ -170,7 +170,7 @@ describe('Vuex members actions', () => {
describe('showRemoveMemberModal', () => {
it(`commits ${types.SHOW_REMOVE_MEMBER_MODAL} mutation`, () => {
testAction(showRemoveMemberModal, modalData, state, [
return testAction(showRemoveMemberModal, modalData, state, [
{
type: types.SHOW_REMOVE_MEMBER_MODAL,
payload: modalData,
@ -181,7 +181,7 @@ describe('Vuex members actions', () => {
describe('hideRemoveMemberModal', () => {
it(`commits ${types.HIDE_REMOVE_MEMBER_MODAL} mutation`, () => {
testAction(hideRemoveMemberModal, undefined, state, [
return testAction(hideRemoveMemberModal, undefined, state, [
{
type: types.HIDE_REMOVE_MEMBER_MODAL,
},

View File

@ -134,7 +134,7 @@ describe('merge conflicts actions', () => {
describe('setLoadingState', () => {
it('commits the right mutation', () => {
testAction(
return testAction(
actions.setLoadingState,
true,
{},
@ -151,7 +151,7 @@ describe('merge conflicts actions', () => {
describe('setErrorState', () => {
it('commits the right mutation', () => {
testAction(
return testAction(
actions.setErrorState,
true,
{},
@ -168,7 +168,7 @@ describe('merge conflicts actions', () => {
describe('setFailedRequest', () => {
it('commits the right mutation', () => {
testAction(
return testAction(
actions.setFailedRequest,
'errors in the request',
{},
@ -207,7 +207,7 @@ describe('merge conflicts actions', () => {
describe('setSubmitState', () => {
it('commits the right mutation', () => {
testAction(
return testAction(
actions.setSubmitState,
true,
{},
@ -224,7 +224,7 @@ describe('merge conflicts actions', () => {
describe('updateCommitMessage', () => {
it('commits the right mutation', () => {
testAction(
return testAction(
actions.updateCommitMessage,
'some message',
{},

View File

@ -28,7 +28,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('setProjectId', () => {
it(`commits ${types.SET_PROJECT_ID} with the new project ID`, () => {
const projectId = '4';
testAction(actions.setProjectId, projectId, state, [
return testAction(actions.setProjectId, projectId, state, [
{ type: types.SET_PROJECT_ID, payload: projectId },
]);
});
@ -37,7 +37,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('setGroupId', () => {
it(`commits ${types.SET_GROUP_ID} with the new group ID`, () => {
const groupId = '123';
testAction(actions.setGroupId, groupId, state, [
return testAction(actions.setGroupId, groupId, state, [
{ type: types.SET_GROUP_ID, payload: groupId },
]);
});
@ -46,16 +46,19 @@ describe('Milestone combobox Vuex store actions', () => {
describe('setGroupMilestonesAvailable', () => {
it(`commits ${types.SET_GROUP_MILESTONES_AVAILABLE} with the boolean indicating if group milestones are available (Premium)`, () => {
state.groupMilestonesAvailable = true;
testAction(actions.setGroupMilestonesAvailable, state.groupMilestonesAvailable, state, [
{ type: types.SET_GROUP_MILESTONES_AVAILABLE, payload: state.groupMilestonesAvailable },
]);
return testAction(
actions.setGroupMilestonesAvailable,
state.groupMilestonesAvailable,
state,
[{ type: types.SET_GROUP_MILESTONES_AVAILABLE, payload: state.groupMilestonesAvailable }],
);
});
});
describe('setSelectedMilestones', () => {
it(`commits ${types.SET_SELECTED_MILESTONES} with the new selected milestones name`, () => {
const selectedMilestones = ['v1.2.3'];
testAction(actions.setSelectedMilestones, selectedMilestones, state, [
return testAction(actions.setSelectedMilestones, selectedMilestones, state, [
{ type: types.SET_SELECTED_MILESTONES, payload: selectedMilestones },
]);
});
@ -63,7 +66,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('clearSelectedMilestones', () => {
it(`commits ${types.CLEAR_SELECTED_MILESTONES} with the new selected milestones name`, () => {
testAction(actions.clearSelectedMilestones, null, state, [
return testAction(actions.clearSelectedMilestones, null, state, [
{ type: types.CLEAR_SELECTED_MILESTONES },
]);
});
@ -72,14 +75,14 @@ describe('Milestone combobox Vuex store actions', () => {
describe('toggleMilestones', () => {
const selectedMilestone = 'v1.2.3';
it(`commits ${types.ADD_SELECTED_MILESTONE} with the new selected milestone name`, () => {
testAction(actions.toggleMilestones, selectedMilestone, state, [
return testAction(actions.toggleMilestones, selectedMilestone, state, [
{ type: types.ADD_SELECTED_MILESTONE, payload: selectedMilestone },
]);
});
it(`commits ${types.REMOVE_SELECTED_MILESTONE} with the new selected milestone name`, () => {
state.selectedMilestones = [selectedMilestone];
testAction(actions.toggleMilestones, selectedMilestone, state, [
return testAction(actions.toggleMilestones, selectedMilestone, state, [
{ type: types.REMOVE_SELECTED_MILESTONE, payload: selectedMilestone },
]);
});
@ -93,7 +96,7 @@ describe('Milestone combobox Vuex store actions', () => {
};
const searchQuery = 'v1.0';
testAction(
return testAction(
actions.search,
searchQuery,
{ ...state, ...getters },
@ -106,7 +109,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('when project does not have license to add group milestones', () => {
it(`commits ${types.SET_SEARCH_QUERY} with the new search query to search for project milestones`, () => {
const searchQuery = 'v1.0';
testAction(
return testAction(
actions.search,
searchQuery,
state,
@ -192,7 +195,7 @@ describe('Milestone combobox Vuex store actions', () => {
groupMilestonesEnabled: () => true,
};
testAction(
return testAction(
actions.fetchMilestones,
undefined,
{ ...state, ...getters },
@ -204,7 +207,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('when project does not have license to add group milestones', () => {
it(`dispatches fetchProjectMilestones`, () => {
testAction(
return testAction(
actions.fetchMilestones,
undefined,
state,

View File

@ -1129,9 +1129,12 @@ describe('Actions Notes Store', () => {
describe('setConfidentiality', () => {
it('calls the correct mutation with the correct args', () => {
testAction(actions.setConfidentiality, true, { noteableData: { confidential: false } }, [
{ type: mutationTypes.SET_ISSUE_CONFIDENTIAL, payload: true },
]);
return testAction(
actions.setConfidentiality,
true,
{ noteableData: { confidential: false } },
[{ type: mutationTypes.SET_ISSUE_CONFIDENTIAL, payload: true }],
);
});
});

View File

@ -25,7 +25,7 @@ describe('Commit form modal store actions', () => {
describe('clearModal', () => {
it('commits CLEAR_MODAL mutation', () => {
testAction(actions.clearModal, {}, {}, [
return testAction(actions.clearModal, {}, {}, [
{
type: types.CLEAR_MODAL,
},
@ -35,7 +35,7 @@ describe('Commit form modal store actions', () => {
describe('requestBranches', () => {
it('commits REQUEST_BRANCHES mutation', () => {
testAction(actions.requestBranches, {}, {}, [
return testAction(actions.requestBranches, {}, {}, [
{
type: types.REQUEST_BRANCHES,
},
@ -74,7 +74,7 @@ describe('Commit form modal store actions', () => {
describe('setBranch', () => {
it('commits SET_BRANCH mutation', () => {
testAction(
return testAction(
actions.setBranch,
{},
{},
@ -96,7 +96,7 @@ describe('Commit form modal store actions', () => {
describe('setSelectedBranch', () => {
it('commits SET_SELECTED_BRANCH mutation', () => {
testAction(actions.setSelectedBranch, {}, {}, [
return testAction(actions.setSelectedBranch, {}, {}, [
{
type: types.SET_SELECTED_BRANCH,
payload: {},
@ -109,7 +109,7 @@ describe('Commit form modal store actions', () => {
it('commits SET_BRANCHES_ENDPOINT mutation', () => {
const endpoint = 'some/endpoint';
testAction(actions.setBranchesEndpoint, endpoint, {}, [
return testAction(actions.setBranchesEndpoint, endpoint, {}, [
{
type: types.SET_BRANCHES_ENDPOINT,
payload: endpoint,
@ -122,7 +122,7 @@ describe('Commit form modal store actions', () => {
const id = 1;
it('commits SET_SELECTED_PROJECT mutation', () => {
testAction(
return testAction(
actions.setSelectedProject,
id,
{},

View File

@ -53,7 +53,7 @@ describe('Project commits actions', () => {
const data = [{ id: 1 }];
mock.onGet(path).replyOnce(HTTP_STATUS_OK, data);
testAction(
return testAction(
actions.fetchAuthors,
null,
state,
@ -66,7 +66,7 @@ describe('Project commits actions', () => {
const path = '/-/autocomplete/users.json';
mock.onGet(path).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
testAction(actions.fetchAuthors, null, state, [], [{ type: 'receiveAuthorsError' }]);
return testAction(actions.fetchAuthors, null, state, [], [{ type: 'receiveAuthorsError' }]);
});
});
});

View File

@ -28,7 +28,7 @@ describe('Ref selector Vuex store actions', () => {
describe('setEnabledRefTypes', () => {
it(`commits ${types.SET_ENABLED_REF_TYPES} with the enabled ref types`, () => {
testAction(actions.setProjectId, ALL_REF_TYPES, state, [
return testAction(actions.setProjectId, ALL_REF_TYPES, state, [
{ type: types.SET_PROJECT_ID, payload: ALL_REF_TYPES },
]);
});
@ -37,7 +37,7 @@ describe('Ref selector Vuex store actions', () => {
describe('setProjectId', () => {
it(`commits ${types.SET_PROJECT_ID} with the new project ID`, () => {
const projectId = '4';
testAction(actions.setProjectId, projectId, state, [
return testAction(actions.setProjectId, projectId, state, [
{ type: types.SET_PROJECT_ID, payload: projectId },
]);
});
@ -46,7 +46,7 @@ describe('Ref selector Vuex store actions', () => {
describe('setSelectedRef', () => {
it(`commits ${types.SET_SELECTED_REF} with the new selected ref name`, () => {
const selectedRef = 'v1.2.3';
testAction(actions.setSelectedRef, selectedRef, state, [
return testAction(actions.setSelectedRef, selectedRef, state, [
{ type: types.SET_SELECTED_REF, payload: selectedRef },
]);
});
@ -55,14 +55,16 @@ describe('Ref selector Vuex store actions', () => {
describe('setParams', () => {
it(`commits ${types.SET_PARAMS} with the provided params`, () => {
const params = { sort: 'updated_asc' };
testAction(actions.setParams, params, state, [{ type: types.SET_PARAMS, payload: params }]);
return testAction(actions.setParams, params, state, [
{ type: types.SET_PARAMS, payload: params },
]);
});
});
describe('search', () => {
it(`commits ${types.SET_QUERY} with the new search query`, () => {
const query = 'hello';
testAction(actions.search, query, state, [{ type: types.SET_QUERY, payload: query }]);
return testAction(actions.search, query, state, [{ type: types.SET_QUERY, payload: query }]);
});
it.each`
@ -73,7 +75,7 @@ describe('Ref selector Vuex store actions', () => {
`(`dispatches fetch actions for enabled ref types`, ({ enabledRefTypes, expectedActions }) => {
const query = 'hello';
state.enabledRefTypes = enabledRefTypes;
testAction(
return testAction(
actions.search,
query,
state,

View File

@ -93,7 +93,7 @@ describe('Release edit/new actions', () => {
describe('loadDraftRelease', () => {
it(`with no saved release, it commits ${types.INITIALIZE_EMPTY_RELEASE}`, () => {
testAction({
return testAction({
action: actions.loadDraftRelease,
state,
expectedMutations: [{ type: types.INITIALIZE_EMPTY_RELEASE }],
@ -203,7 +203,7 @@ describe('Release edit/new actions', () => {
describe('saveRelease', () => {
it(`commits ${types.REQUEST_SAVE_RELEASE} and then dispatched "createRelease"`, () => {
testAction({
return testAction({
action: actions.saveRelease,
state,
expectedMutations: [{ type: types.REQUEST_SAVE_RELEASE }],
@ -218,7 +218,7 @@ describe('Release edit/new actions', () => {
describe('initializeRelease', () => {
it('dispatches "fetchRelease"', () => {
testAction({
return testAction({
action: actions.initializeRelease,
state,
expectedActions: [{ type: 'fetchRelease' }],
@ -228,7 +228,7 @@ describe('Release edit/new actions', () => {
describe('saveRelease', () => {
it(`commits ${types.REQUEST_SAVE_RELEASE} and then dispatched "updateRelease"`, () => {
testAction({
return testAction({
action: actions.saveRelease,
state,
expectedMutations: [{ type: types.REQUEST_SAVE_RELEASE }],

View File

@ -254,7 +254,7 @@ describe('LabelsSelect Actions', () => {
describe('updateLabelsSetState', () => {
it('updates labels `set` state to match `selectedLabels`', () => {
testAction(
return testAction(
actions.updateLabelsSetState,
{},
state,

View File

@ -0,0 +1,61 @@
import { GlIcon, GlButton } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import DeployKeyItem from '~/vue_shared/components/list_selector/deploy_key_item.vue';
describe('DeployKeyItem spec', () => {
let wrapper;
const MOCK_DATA = { title: 'Some key', owner: 'root', id: '123' };
const createComponent = (props) => {
wrapper = shallowMountExtended(DeployKeyItem, {
propsData: {
data: MOCK_DATA,
...props,
},
});
};
const findIcon = () => wrapper.findComponent(GlIcon);
const findDeleteButton = () => wrapper.findComponent(GlButton);
const findWrapper = () => wrapper.findByTestId('deploy-key-wrapper');
beforeEach(() => createComponent());
it('renders a key icon component', () => {
expect(findIcon().props('name')).toBe('key');
});
it('renders a title and username', () => {
expect(wrapper.text()).toContain('Some key');
expect(wrapper.text()).toContain('@root');
});
it('does not render a delete button by default', () => {
expect(findDeleteButton().exists()).toBe(false);
});
it('emits a select event when the wrapper is clicked', () => {
findWrapper().trigger('click');
expect(wrapper.emitted('select')).toEqual([[MOCK_DATA.id]]);
});
describe('Delete button', () => {
beforeEach(() => createComponent({ canDelete: true }));
it('renders a delete button', () => {
expect(findDeleteButton().exists()).toBe(true);
expect(findDeleteButton().props('icon')).toBe('remove');
});
it('emits a delete event if the delete button is clicked', () => {
const stopPropagation = jest.fn();
findDeleteButton().vm.$emit('click', { stopPropagation });
expect(stopPropagation).toHaveBeenCalled();
expect(wrapper.emitted('delete')).toEqual([[MOCK_DATA.id]]);
});
});
});

View File

@ -7,6 +7,7 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import ListSelector from '~/vue_shared/components/list_selector/index.vue';
import UserItem from '~/vue_shared/components/list_selector/user_item.vue';
import GroupItem from '~/vue_shared/components/list_selector/group_item.vue';
import DeployKeyItem from '~/vue_shared/components/list_selector/deploy_key_item.vue';
import groupsAutocompleteQuery from '~/graphql_shared/queries/groups_autocomplete.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@ -20,18 +21,21 @@ describe('List Selector spec', () => {
let fakeApollo;
const USERS_MOCK_PROPS = {
title: 'Users',
projectPath: 'some/project/path',
groupPath: 'some/group/path',
type: 'users',
};
const GROUPS_MOCK_PROPS = {
title: 'Groups',
projectPath: 'some/project/path',
type: 'groups',
};
const DEPLOY_KEYS_MOCK_PROPS = {
projectPath: 'some/project/path',
type: 'deployKeys',
};
const groupsAutocompleteQuerySuccess = jest.fn().mockResolvedValue(GROUPS_RESPONSE_MOCK);
const createComponent = async (props) => {
@ -56,6 +60,7 @@ describe('List Selector spec', () => {
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findAllUserComponents = () => wrapper.findAllComponents(UserItem);
const findAllGroupComponents = () => wrapper.findAllComponents(GroupItem);
const findAllDeployKeyComponents = () => wrapper.findAllComponents(DeployKeyItem);
beforeEach(() => {
jest.spyOn(Api, 'projectUsers').mockResolvedValue(USERS_RESPONSE_MOCK);
@ -254,4 +259,46 @@ describe('List Selector spec', () => {
});
});
});
describe('Deploy keys type', () => {
beforeEach(() => createComponent(DEPLOY_KEYS_MOCK_PROPS));
it('renders a correct title', () => {
expect(findTitle().exists()).toBe(true);
expect(findTitle().text()).toContain('Deploy keys');
});
it('renders the correct icon', () => {
expect(findIcon().props('name')).toBe('key');
});
describe('selected items', () => {
const selectedKey = { title: 'MyKey', owner: 'peter', id: '123' };
const selectedItems = [selectedKey];
beforeEach(() => createComponent({ ...DEPLOY_KEYS_MOCK_PROPS, selectedItems }));
it('renders a heading with the total selected items', () => {
expect(findTitle().text()).toContain('Deploy keys');
expect(findTitle().text()).toContain('1');
});
it('renders a deploy key component for each selected item', () => {
expect(findAllDeployKeyComponents().length).toBe(selectedItems.length);
expect(findAllDeployKeyComponents().at(0).props()).toMatchObject({
data: selectedKey,
canDelete: true,
});
});
it('emits a delete event when a delete event is emitted from the deploy key component', () => {
const id = '123';
findAllDeployKeyComponents().at(0).vm.$emit('delete', id);
expect(wrapper.emitted('delete')).toEqual([[id]]);
});
// TODO - add a test for the select event once we have API integration
// https://gitlab.com/gitlab-org/gitlab/-/issues/432494
});
});
});

View File

@ -43,7 +43,7 @@ describe('Metrics tab store actions', () => {
it('should call success action when fetching metric images', () => {
service.getMetricImages.mockImplementation(() => Promise.resolve(fileList));
testAction(actions.fetchImages, null, state, [
return testAction(actions.fetchImages, null, state, [
{ type: types.REQUEST_METRIC_IMAGES },
{
type: types.RECEIVE_METRIC_IMAGES_SUCCESS,
@ -80,7 +80,7 @@ describe('Metrics tab store actions', () => {
it('should call success action when uploading an image', () => {
service.uploadMetricImage.mockImplementation(() => Promise.resolve(fileList[0]));
testAction(actions.uploadImage, payload, state, [
return testAction(actions.uploadImage, payload, state, [
{ type: types.REQUEST_METRIC_UPLOAD },
{
type: types.RECEIVE_METRIC_UPLOAD_SUCCESS,
@ -112,7 +112,7 @@ describe('Metrics tab store actions', () => {
it('should call success action when updating an image', () => {
service.updateMetricImage.mockImplementation(() => Promise.resolve());
testAction(actions.updateImage, payload, state, [
return testAction(actions.updateImage, payload, state, [
{ type: types.REQUEST_METRIC_UPLOAD },
{
type: types.RECEIVE_METRIC_UPDATE_SUCCESS,
@ -140,7 +140,7 @@ describe('Metrics tab store actions', () => {
it('should call success action when deleting an image', () => {
service.deleteMetricImage.mockImplementation(() => Promise.resolve());
testAction(actions.deleteImage, payload, state, [
return testAction(actions.deleteImage, payload, state, [
{
type: types.RECEIVE_METRIC_DELETE_SUCCESS,
payload,
@ -151,7 +151,7 @@ describe('Metrics tab store actions', () => {
describe('initial data', () => {
it('should set the initial data correctly', () => {
testAction(actions.setInitialData, initialData, state, [
return testAction(actions.setInitialData, initialData, state, [
{ type: types.SET_INITIAL_DATA, payload: initialData },
]);
});

View File

@ -11,8 +11,8 @@ describe('whats new actions', () => {
describe('openDrawer', () => {
useLocalStorageSpy();
it('should commit openDrawer', () => {
testAction(actions.openDrawer, 'digest-hash', {}, [{ type: types.OPEN_DRAWER }]);
it('should commit openDrawer', async () => {
await testAction(actions.openDrawer, 'digest-hash', {}, [{ type: types.OPEN_DRAWER }]);
expect(window.localStorage.setItem).toHaveBeenCalledWith(
'display-whats-new-notification',
@ -23,7 +23,7 @@ describe('whats new actions', () => {
describe('closeDrawer', () => {
it('should commit closeDrawer', () => {
testAction(actions.closeDrawer, {}, {}, [{ type: types.CLOSE_DRAWER }]);
return testAction(actions.closeDrawer, {}, {}, [{ type: types.CLOSE_DRAWER }]);
});
});
@ -52,7 +52,7 @@ describe('whats new actions', () => {
.onGet('/-/whats_new', { params: { page: undefined, v: undefined } })
.replyOnce(HTTP_STATUS_OK, [{ title: 'GitLab Stories' }]);
testAction(
return testAction(
actions.fetchItems,
{},
{},
@ -69,7 +69,7 @@ describe('whats new actions', () => {
.onGet('/-/whats_new', { params: { page: 8, v: 42 } })
.replyOnce(HTTP_STATUS_OK, [{ title: 'GitLab Stories' }]);
testAction(
return testAction(
actions.fetchItems,
{ page: 8, versionDigest: 42 },
{},
@ -80,11 +80,11 @@ describe('whats new actions', () => {
});
it('if already fetching, does not fetch', () => {
testAction(actions.fetchItems, {}, { fetching: true }, []);
return testAction(actions.fetchItems, {}, { fetching: true }, []);
});
it('should commit fetching, setFeatures and setPagination', () => {
testAction(actions.fetchItems, {}, {}, [
return testAction(actions.fetchItems, {}, {}, [
{ type: types.SET_FETCHING, payload: true },
{ type: types.ADD_FEATURES, payload: [{ title: 'Whats New Drawer', url: 'www.url.com' }] },
{ type: types.SET_PAGE_INFO, payload: { nextPage: 2 } },
@ -94,8 +94,10 @@ describe('whats new actions', () => {
});
describe('setDrawerBodyHeight', () => {
testAction(actions.setDrawerBodyHeight, 42, {}, [
{ type: types.SET_DRAWER_BODY_HEIGHT, payload: 42 },
]);
it('should commit setDrawerBodyHeight', () => {
return testAction(actions.setDrawerBodyHeight, 42, {}, [
{ type: types.SET_DRAWER_BODY_HEIGHT, payload: 42 },
]);
});
});
});

View File

@ -9,8 +9,8 @@ RSpec.describe Resolvers::Ci::Catalog::ResourcesResolver, feature_category: :pip
let_it_be(:project_1) { create(:project, name: 'Z', namespace: namespace) }
let_it_be(:project_2) { create(:project, name: 'A_Test', namespace: namespace) }
let_it_be(:project_3) { create(:project, name: 'L', description: 'Test', namespace: namespace) }
let_it_be(:resource_1) { create(:ci_catalog_resource, project: project_1) }
let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2) }
let_it_be(:published_resource_1) { create(:ci_catalog_resource, :published, project: project_1) }
let_it_be(:published_resource_2) { create(:ci_catalog_resource, :published, project: project_2) }
let_it_be(:resource_3) { create(:ci_catalog_resource, project: project_3) }
let_it_be(:user) { create(:user) }
@ -34,30 +34,30 @@ RSpec.describe Resolvers::Ci::Catalog::ResourcesResolver, feature_category: :pip
namespace.add_owner(user)
end
it 'returns all catalog resources visible to the current user in the namespace' do
expect(result.items.count).to be(3)
expect(result.items.pluck(:name)).to contain_exactly('Z', 'A_Test', 'L')
it 'returns all published catalog resources visible to the current user in the namespace' do
expect(result.items.count).to be(2)
expect(result.items.pluck(:name)).to contain_exactly('Z', 'A_Test')
end
context 'when the sort parameter is not provided' do
it 'returns all catalog resources sorted by descending created date' do
expect(result.items.pluck(:name)).to eq(%w[L A_Test Z])
it 'returns all published catalog resources sorted by descending created date' do
expect(result.items.pluck(:name)).to eq(%w[A_Test Z])
end
end
context 'when the sort parameter is provided' do
let(:sort) { 'NAME_DESC' }
it 'returns all catalog resources sorted by descending name' do
expect(result.items.pluck(:name)).to eq(%w[Z L A_Test])
it 'returns all published catalog resources sorted by descending name' do
expect(result.items.pluck(:name)).to eq(%w[Z A_Test])
end
end
context 'when the search parameter is provided' do
let(:search) { 'test' }
it 'returns the catalog resources that match the search term' do
expect(result.items.pluck(:name)).to contain_exactly('A_Test', 'L')
it 'returns published catalog resources that match the search term' do
expect(result.items.pluck(:name)).to contain_exactly('A_Test')
end
end
end

View File

@ -97,7 +97,8 @@ RSpec.describe Ci::PipelinesHelper, feature_category: :continuous_integration do
:pipeline_editor_path,
:suggested_ci_templates,
:full_path,
:visibility_pipeline_id_type])
:visibility_pipeline_id_type,
:show_jenkins_ci_prompt])
end
end
@ -123,4 +124,39 @@ RSpec.describe Ci::PipelinesHelper, feature_category: :continuous_integration do
end
end
end
describe '#show_jenkins_ci_prompt' do
using RSpec::Parameterized::TableSyntax
subject { helper.pipelines_list_data(project, 'list_url')[:show_jenkins_ci_prompt] }
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:project) { create(:project, :repository) }
let_it_be(:repository) { project.repository }
before do
sign_in(user)
project.send(add_role_method, user)
allow(repository).to receive(:gitlab_ci_yml).and_return(has_gitlab_ci?)
allow(repository).to receive(:jenkinsfile?).and_return(has_jenkinsfile?)
end
where(:add_role_method, :has_gitlab_ci?, :has_jenkinsfile?, :result) do
# Test permissions
:add_owner | false | true | "true"
:add_maintainer | false | true | "true"
:add_developer | false | true | "true"
:add_guest | false | true | "false"
# Test combination of presence of ci files
:add_owner | false | false | "false"
:add_owner | true | true | "false"
:add_owner | true | false | "false"
end
with_them do
it { expect(subject).to eq(result) }
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::FileDetector do
RSpec.describe Gitlab::FileDetector, feature_category: :global_search do
describe '.types_in_paths' do
it 'returns the file types for the given paths' do
expect(described_class.types_in_paths(%w[README.md CHANGELOG VERSION VERSION]))
@ -116,5 +116,9 @@ RSpec.describe Gitlab::FileDetector do
expect(described_class.type_of(type_name)).to be_nil
end
end
it 'returns the type of a Jenkins config file' do
expect(described_class.type_of('jenkinsfile')).to eq(:jenkinsfile)
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
require 'rack'
require 'request_store'
require_relative '../../../support/helpers/next_instance_of'
require 'gitlab/rspec/next_instance_of'
RSpec.describe Gitlab::Middleware::RequestContext, feature_category: :application_instrumentation do
include NextInstanceOf

View File

@ -480,6 +480,14 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
describe '.send_url' do
let(:url) { 'http://example.com' }
let(:expected_params) do
{
'URL' => url,
'AllowRedirects' => false,
'Body' => '',
'Method' => 'GET'
}
end
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(
@ -488,12 +496,7 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-url")
expect(params).to eq({
'URL' => url,
'AllowRedirects' => false,
'Body' => '',
'Method' => 'GET'
}.deep_stringify_keys)
expect(params).to eq(expected_params)
end
context 'when body, headers and method are specified' do
@ -501,6 +504,14 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
let(:headers) { { Authorization: ['Bearer token'] } }
let(:method) { 'POST' }
let(:expected_params) do
super().merge(
'Body' => body,
'Header' => headers,
'Method' => method
).deep_stringify_keys
end
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(
described_class.send_url(url, body: body, headers: headers, method: method)
@ -508,13 +519,33 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-url")
expect(params).to eq({
'URL' => url,
'AllowRedirects' => false,
'Body' => body,
'Header' => headers,
'Method' => method
}.deep_stringify_keys)
expect(params).to eq(expected_params)
end
end
context 'when timeouts are set' do
let(:timeouts) { { open: '5', read: '5' } }
let(:expected_params) { super().merge('DialTimeout' => '5s', 'ResponseHeaderTimeout' => '5s') }
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(described_class.send_url(url, timeouts: timeouts))
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-url")
expect(params).to eq(expected_params)
end
end
context 'when response statuses are set' do
let(:response_statuses) { { error: :service_unavailable, timeout: :bad_request } }
let(:expected_params) { super().merge('ErrorResponseStatus' => 503, 'TimeoutResponseStatus' => 400) }
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(described_class.send_url(url, response_statuses: response_statuses))
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-url")
expect(params).to eq(expected_params)
end
end
end
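
The two new contexts pin down how timeouts and response statuses should surface in the send-url parameters. A sketch of that mapping follows; Gitlab::Workhorse.send_url also encodes the params into the Gitlab-Workhorse-Send-Data header, which is omitted here, and the keyword handling is inferred from the expectations rather than copied from the implementation.

require 'rack'

def send_url_params(url, timeouts: {}, response_statuses: {})
  params = {
    'URL' => url,
    'AllowRedirects' => false,
    'Body' => '',
    'Method' => 'GET'
  }

  # seconds arrive as strings ('5') and are rendered as Go durations ('5s')
  params['DialTimeout'] = "#{timeouts[:open]}s" if timeouts[:open]
  params['ResponseHeaderTimeout'] = "#{timeouts[:read]}s" if timeouts[:read]

  # symbolic statuses are converted to numeric HTTP codes
  params['ErrorResponseStatus'] = Rack::Utils.status_code(response_statuses[:error]) if response_statuses[:error]
  params['TimeoutResponseStatus'] = Rack::Utils.status_code(response_statuses[:timeout]) if response_statuses[:timeout]

  params
end

send_url_params('http://example.com', timeouts: { open: '5', read: '5' })
# => includes 'DialTimeout' => '5s' and 'ResponseHeaderTimeout' => '5s'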

View File

@ -26,7 +26,7 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
require_relative '../../support/helpers/next_instance_of'
require 'gitlab/rspec/next_instance_of'
require_relative '../../support/shared_contexts/lib/sbom/package_url_shared_contexts'
RSpec.describe Sbom::PackageUrl, feature_category: :dependency_management do

View File

@ -4,13 +4,14 @@ require 'spec_helper'
RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
let_it_be(:namespace) { create(:group) }
let_it_be(:project_x) { create(:project, namespace: namespace, name: 'X Project') }
let_it_be(:project_a) { create(:project, :public, namespace: namespace, name: 'A Project') }
let_it_be(:project_noaccess) { create(:project, namespace: namespace, name: 'C Project') }
let_it_be(:project_ext) { create(:project, :public, name: 'TestProject') }
let_it_be(:project_x) { create(:project, namespace: namespace, name: 'X Project') }
let_it_be(:public_project_a) { create(:project, :public, namespace: namespace, name: 'A Project') }
let_it_be(:inaccessable_project) { create(:project, namespace: namespace, name: 'C Project') }
let_it_be(:public_project_ext) { create(:project, :public, name: 'TestProject') }
let_it_be(:public_project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
let_it_be(:project_b) do
let_it_be(:private_project) do
create(:project, namespace: namespace, name: 'B Project', description: 'Rspec test framework')
end
@ -18,50 +19,50 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
before_all do
project_x.add_reporter(user)
project_b.add_reporter(user)
project_a.add_reporter(user)
project_ext.add_reporter(user)
private_project.add_reporter(user)
public_project_a.add_reporter(user)
public_project_ext.add_reporter(user)
end
describe '#resources' do
subject(:resources) { list.resources(**params) }
context 'when user is anonymous' do
let(:user) { nil }
let(:params) { {} }
context 'when fetching all resources' do
let_it_be(:resource_1) { create(:ci_catalog_resource, :published, project: public_project_a) }
let_it_be(:resource_2) { create(:ci_catalog_resource, :published, project: public_project_ext) }
let_it_be(:resource_3) { create(:ci_catalog_resource, :published, project: private_project) }
let_it_be(:unpublished_resource) { create(:ci_catalog_resource, project: public_project) }
let!(:resource_1) { create(:ci_catalog_resource, project: project_a) }
let!(:resource_2) { create(:ci_catalog_resource, project: project_ext) }
let!(:resource_3) { create(:ci_catalog_resource, project: project_b) }
context 'when user is anonymous' do
let(:user) { nil }
let(:params) { {} }
it 'returns only resources for public projects' do
is_expected.to contain_exactly(resource_1, resource_2)
end
it 'returns only published resources for public projects' do
is_expected.to contain_exactly(resource_1, resource_2)
is_expected.not_to include(unpublished_resource)
end
context 'when sorting is provided' do
let(:params) { { sort: :name_desc } }
context 'when sorting is provided' do
let(:params) { { sort: :name_desc } }
it 'returns only resources for public projects sorted by name DESC' do
is_expected.to contain_exactly(resource_2, resource_1)
it 'returns only resources for public projects sorted by name DESC' do
is_expected.to contain_exactly(resource_2, resource_1)
end
end
end
end
context 'when search params are provided' do
let(:params) { { search: 'test' } }
context 'when search params are provided' do
let(:params) { { search: 'test' } }
let!(:resource_1) { create(:ci_catalog_resource, project: project_a) }
let!(:resource_2) { create(:ci_catalog_resource, project: project_ext) }
let!(:resource_3) { create(:ci_catalog_resource, project: project_b) }
it 'returns the resources that match the search params' do
is_expected.to contain_exactly(resource_2, resource_3)
end
it 'returns the resources that match the search params' do
is_expected.to contain_exactly(resource_2, resource_3)
end
context 'when search term is too small' do
let(:params) { { search: 'te' } }
context 'when search term is too small' do
let(:params) { { search: 'te' } }
it { is_expected.to be_empty }
it { is_expected.to be_empty }
end
end
end
@ -69,7 +70,7 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
let(:params) { { namespace: namespace } }
context 'when namespace is not a root namespace' do
let(:namespace) { create(:group, :nested) }
let_it_be(:namespace) { create(:group, :nested) }
it 'raises an exception' do
expect { resources }.to raise_error(ArgumentError, 'Namespace is not a root namespace')
@ -78,6 +79,8 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
context 'when the user has access to all projects in the namespace' do
context 'when the namespace has no catalog resources' do
let_it_be(:namespace) { create(:group) }
it { is_expected.to be_empty }
end
@ -87,19 +90,22 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
let_it_be(:tomorrow) { today + 1.day }
let_it_be(:resource_1) do
create(:ci_catalog_resource, project: project_x, latest_released_at: yesterday, created_at: today)
create(:ci_catalog_resource, :published, project: project_x, latest_released_at: yesterday,
created_at: today)
end
let_it_be(:resource_2) do
create(:ci_catalog_resource, project: project_b, latest_released_at: today, created_at: yesterday)
create(:ci_catalog_resource, :published, project: private_project, latest_released_at: today,
created_at: yesterday)
end
let_it_be(:resource_3) do
create(:ci_catalog_resource, project: project_a, latest_released_at: nil, created_at: tomorrow)
create(:ci_catalog_resource, :published, project: public_project_a, latest_released_at: nil,
created_at: tomorrow)
end
let_it_be(:other_namespace_resource) do
create(:ci_catalog_resource, project: project_ext, latest_released_at: tomorrow)
create(:ci_catalog_resource, :published, project: public_project_ext, latest_released_at: tomorrow)
end
it 'contains only catalog resources for projects in that namespace' do
@ -161,8 +167,12 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context 'when the user only has access to some projects in the namespace' do
let!(:accessible_resource) { create(:ci_catalog_resource, project: project_x) }
let!(:inaccessible_resource) { create(:ci_catalog_resource, project: project_noaccess) }
let_it_be(:accessible_resource) { create(:ci_catalog_resource, :published, project: project_x) }
let_it_be(:inaccessible_resource) { create(:ci_catalog_resource, :published, project: inaccessable_project) }
before do
project_x.add_reporter(user)
end
it 'only returns catalog resources for projects the user has access to' do
is_expected.to contain_exactly(accessible_resource)
@ -170,10 +180,11 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context 'when the user does not have access to the namespace' do
let!(:project) { create(:project) }
let!(:resource) { create(:ci_catalog_resource, project: project) }
let(:namespace) { project.namespace }
let_it_be(:inaccessable_namespace) { create(:group) }
let_it_be(:inaccessable_project) { create(:project) }
let_it_be(:inaccessable_catalog_resource) do
create(:ci_catalog_resource, :published, project: inaccessable_project)
end
it { is_expected.to be_empty }
end
@ -184,7 +195,7 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
subject { list.find_resource(id: id) }
context 'when the resource is published and visible to the user' do
let_it_be(:accessible_resource) { create(:ci_catalog_resource, project: project_a, state: :published) }
let_it_be(:accessible_resource) { create(:ci_catalog_resource, :published, project: public_project_a) }
let(:id) { accessible_resource.id }
@ -202,7 +213,7 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context 'when the resource is not published' do
let_it_be(:draft_resource) { create(:ci_catalog_resource, project: project_a, state: :draft) }
let_it_be(:draft_resource) { create(:ci_catalog_resource, project: private_project, state: :draft) }
let(:id) { draft_resource.id }
@ -212,7 +223,7 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context "when the current user cannot read code on the resource's project" do
let_it_be(:inaccessible_resource) { create(:ci_catalog_resource, project: project_noaccess, state: :published) }
let_it_be(:inaccessible_resource) { create(:ci_catalog_resource, :published, project: inaccessable_project) }
let(:id) { inaccessible_resource.id }
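
These examples lean on a state column with draft and published values (see the state: :draft and state: :published attributes above). A sketch of the model-side declaration follows; the integer mapping and the default are assumptions.

module Ci
  module Catalog
    class Resource < ApplicationRecord
      # enum also generates the published/draft scopes and the published?/draft?
      # predicates that a listing can filter on
      enum state: { draft: 0, published: 1 }, _default: :draft
    end
  end
end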

View File

@ -1585,6 +1585,29 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
describe "#jenkinsfile?" do
let_it_be(:project) { create(:project, :repository) }
it 'returns true when a Jenkinsfile exists' do
files = [TestBlob.new('file'), TestBlob.new('Jenkinsfile'), TestBlob.new('copying')]
expect(repository.tree).to receive(:blobs).and_return(files)
expect(repository.jenkinsfile?).to be(true)
end
it 'is case-insensitive' do
files = [TestBlob.new('file'), TestBlob.new('JENKINSFILE'), TestBlob.new('copying')]
expect(repository.tree).to receive(:blobs).and_return(files)
expect(repository.jenkinsfile?).to be(true)
end
it 'returns false when no Jenkinsfile exists' do
expect(repository.tree).to receive(:blobs).and_return([])
expect(repository.jenkinsfile?).to be(false)
end
end
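
A Repository#jenkinsfile? consistent with the examples above would do a case-insensitive lookup among the root tree blobs; the shipped method is not part of this diff, so the following is an illustration only.

def jenkinsfile?
  # blobs are stubbed in the examples above; any? yields the boolean the spec asserts
  tree.blobs.any? { |blob| blob.name.casecmp?('Jenkinsfile') }
end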
describe '#ambiguous_ref?' do
subject { repository.ambiguous_ref?(ref) }

View File

@ -1110,85 +1110,148 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:admin_dependency_proxy) }
end
context 'feature disabled' do
let(:current_user) { owner }
shared_examples 'disallows all dependency proxy access' do
it { is_expected.to be_disallowed(:read_dependency_proxy) }
it { is_expected.to be_disallowed(:admin_dependency_proxy) }
end
shared_examples 'allows dependency proxy read access but not admin' do
it { is_expected.to be_allowed(:read_dependency_proxy) }
it { is_expected.to be_disallowed(:admin_dependency_proxy) }
end
context 'feature disabled' do
let(:current_user) { owner }
before do
stub_config(dependency_proxy: { enabled: false })
end
it_behaves_like 'disallows all dependency proxy access'
end
context 'feature enabled' do
before do
stub_config(dependency_proxy: { enabled: true })
stub_config(dependency_proxy: { enabled: true }, registry: { enabled: true })
end
context 'reporter' do
let(:current_user) { reporter }
context 'human user' do
context 'reporter' do
let(:current_user) { reporter }
it { is_expected.to be_allowed(:read_dependency_proxy) }
it { is_expected.to be_disallowed(:admin_dependency_proxy) }
it_behaves_like 'allows dependency proxy read access but not admin'
end
context 'developer' do
let(:current_user) { developer }
it_behaves_like 'allows dependency proxy read access but not admin'
end
context 'maintainer' do
let(:current_user) { maintainer }
it_behaves_like 'allows dependency proxy read access but not admin'
it_behaves_like 'disabling admin_package feature flag'
end
context 'owner' do
let(:current_user) { owner }
it { is_expected.to be_allowed(:read_dependency_proxy) }
it { is_expected.to be_allowed(:admin_dependency_proxy) }
it_behaves_like 'disabling admin_package feature flag'
end
end
context 'developer' do
let(:current_user) { developer }
context 'deploy token user' do
let!(:group_deploy_token) do
create(:group_deploy_token, group: group, deploy_token: deploy_token)
end
it { is_expected.to be_allowed(:read_dependency_proxy) }
it { is_expected.to be_disallowed(:admin_dependency_proxy) }
subject { described_class.new(deploy_token, group) }
context 'with insufficient scopes' do
let_it_be(:deploy_token) { create(:deploy_token, :group) }
it_behaves_like 'disallows all dependency proxy access'
end
context 'with sufficient scopes' do
let_it_be(:deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
it_behaves_like 'allows dependency proxy read access but not admin'
end
end
context 'maintainer' do
let(:current_user) { maintainer }
context 'group access token user' do
let_it_be(:bot_user) { create(:user, :project_bot) }
let_it_be(:token) { create(:personal_access_token, user: bot_user, scopes: [Gitlab::Auth::READ_API_SCOPE]) }
it { is_expected.to be_allowed(:read_dependency_proxy) }
it { is_expected.to be_disallowed(:admin_dependency_proxy) }
subject { described_class.new(bot_user, group) }
it_behaves_like 'disabling admin_package feature flag'
context 'not a member of the group' do
it_behaves_like 'disallows all dependency proxy access'
end
context 'a member of the group' do
before do
group.add_guest(bot_user)
end
it_behaves_like 'allows dependency proxy read access but not admin'
end
end
context 'owner' do
let(:current_user) { owner }
context 'all other user types' do
User::USER_TYPES.except(:human, :project_bot).each_value do |user_type|
context "with user_type #{user_type}" do
before do
current_user.update!(user_type: user_type)
end
it { is_expected.to be_allowed(:read_dependency_proxy) }
it { is_expected.to be_allowed(:admin_dependency_proxy) }
context 'when the user has sufficient access' do
let(:current_user) { guest }
it_behaves_like 'disabling admin_package feature flag'
it_behaves_like 'allows dependency proxy read access but not admin'
end
context 'when the user does not have sufficient access' do
let(:current_user) { non_group_member }
it_behaves_like 'disallows all dependency proxy access'
end
end
end
end
end
end
context 'deploy token access' do
let!(:group_deploy_token) do
create(:group_deploy_token, group: group, deploy_token: deploy_token)
end
subject { described_class.new(deploy_token, group) }
context 'a deploy token with read_package_registry scope' do
let(:deploy_token) { create(:deploy_token, :group, read_package_registry: true) }
it { is_expected.to be_allowed(:read_package) }
it { is_expected.to be_allowed(:read_group) }
it { is_expected.to be_disallowed(:create_package) }
end
context 'a deploy token with write_package_registry scope' do
let(:deploy_token) { create(:deploy_token, :group, write_package_registry: true) }
it { is_expected.to be_allowed(:create_package) }
it { is_expected.to be_allowed(:read_package) }
it { is_expected.to be_allowed(:read_group) }
it { is_expected.to be_disallowed(:destroy_package) }
end
context 'a deploy token with dependency proxy scopes' do
let_it_be(:deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
before do
stub_config(dependency_proxy: { enabled: true })
context 'package registry' do
context 'deploy token user' do
let!(:group_deploy_token) do
create(:group_deploy_token, group: group, deploy_token: deploy_token)
end
it { is_expected.to be_allowed(:read_dependency_proxy) }
it { is_expected.to be_disallowed(:admin_dependency_proxy) }
subject { described_class.new(deploy_token, group) }
context 'with read_package_registry scope' do
let(:deploy_token) { create(:deploy_token, :group, read_package_registry: true) }
it { is_expected.to be_allowed(:read_package) }
it { is_expected.to be_allowed(:read_group) }
it { is_expected.to be_disallowed(:create_package) }
end
context 'with write_package_registry scope' do
let(:deploy_token) { create(:deploy_token, :group, write_package_registry: true) }
it { is_expected.to be_allowed(:create_package) }
it { is_expected.to be_allowed(:read_package) }
it { is_expected.to be_allowed(:read_group) }
it { is_expected.to be_disallowed(:destroy_package) }
end
end
end

View File

@ -29,8 +29,11 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
)
end
let_it_be(:resource1) { create(:ci_catalog_resource, project: project1, latest_released_at: '2023-01-01T00:00:00Z') }
let_it_be(:public_resource) { create(:ci_catalog_resource, project: public_project) }
let_it_be(:resource1) do
create(:ci_catalog_resource, :published, project: project1, latest_released_at: '2023-01-01T00:00:00Z')
end
let_it_be(:public_resource) { create(:ci_catalog_resource, :published, project: public_project) }
let(:query) do
<<~GQL
@ -61,7 +64,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
run_with_clean_state(query, context: ctx)
end
create(:ci_catalog_resource, project: project2)
create(:ci_catalog_resource, :published, project: project2)
expect do
run_with_clean_state(query, context: ctx)
@ -119,7 +122,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
end
it 'limits the request to 1 resource at a time' do
create(:ci_catalog_resource, project: project2)
create(:ci_catalog_resource, :published, project: project2)
post_query
@ -327,8 +330,8 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
end
it 'returns catalog resources with the expected data' do
resource2 = create(:ci_catalog_resource, project: project2)
_resource_in_another_namespace = create(:ci_catalog_resource)
resource2 = create(:ci_catalog_resource, :published, project: project2)
_resource_in_another_namespace = create(:ci_catalog_resource, :published)
post_query

View File

@ -3,37 +3,84 @@
require 'spec_helper'
RSpec.describe 'Uploads', 'routing' do
it 'allows creating uploads for personal snippets' do
expect(post('/uploads/personal_snippet?id=1')).to route_to(
controller: 'uploads',
action: 'create',
model: 'personal_snippet',
id: '1'
)
context 'for personal snippets' do
it 'allows creating uploads for personal snippets' do
expect(post('/uploads/personal_snippet?id=1')).to route_to(
controller: 'uploads',
action: 'create',
model: 'personal_snippet',
id: '1'
)
end
end
it 'allows creating uploads for users' do
expect(post('/uploads/user?id=1')).to route_to(
controller: 'uploads',
action: 'create',
model: 'user',
id: '1'
)
context 'for users' do
it 'allows creating uploads for users' do
expect(post('/uploads/user?id=1')).to route_to(
controller: 'uploads',
action: 'create',
model: 'user',
id: '1'
)
end
end
it 'allows fetching alert metric images' do
expect(get('/uploads/-/system/alert_management_metric_image/file/1/test.jpg')).to route_to(
controller: 'uploads',
action: 'show',
model: 'alert_management_metric_image',
id: '1',
filename: 'test.jpg',
mounted_as: 'file'
)
context 'for abuse reports' do
it 'allows fetching uploaded files for abuse reports' do
expect(get('/uploads/-/system/abuse_report/1/secret/test.png')).to route_to(
controller: 'uploads',
action: 'show',
model: 'abuse_report',
id: '1',
secret: 'secret',
filename: 'test.png'
)
end
it 'allows creating uploads for abuse reports' do
expect(post('/uploads/abuse_report?id=1')).to route_to(
controller: 'uploads',
action: 'create',
model: 'abuse_report',
id: '1'
)
end
it 'allows authorizing uploads for abuse reports' do
expect(post('/uploads/abuse_report/authorize')).to route_to(
controller: 'uploads',
action: 'authorize',
model: 'abuse_report'
)
end
it 'allows fetching abuse report screenshots' do
expect(get('/uploads/-/system/abuse_report/screenshot/1/test.jpg')).to route_to(
controller: 'uploads',
action: 'show',
model: 'abuse_report',
id: '1',
filename: 'test.jpg',
mounted_as: 'screenshot'
)
end
end
context 'for alert management' do
it 'allows fetching alert metric images' do
expect(get('/uploads/-/system/alert_management_metric_image/file/1/test.jpg')).to route_to(
controller: 'uploads',
action: 'show',
model: 'alert_management_metric_image',
id: '1',
filename: 'test.jpg',
mounted_as: 'file'
)
end
end
it 'does not allow creating uploads for other models' do
unroutable_models = UploadsController::MODEL_CLASSES.keys.compact - %w[personal_snippet user]
unroutable_models = UploadsController::MODEL_CLASSES.keys.compact - %w[personal_snippet user abuse_report]
unroutable_models.each do |model|
expect(post("/uploads/#{model}?id=1")).not_to be_routable
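
The added examples imply new abuse_report entries in the uploads routes. The rough shape is sketched below; the real config/routes/uploads.rb is not shown in this diff, so the constraint lists and the defaults hash are assumptions.

scope path: :uploads do
  # bare uploads and direct-upload authorization
  post ':model', to: 'uploads#create',
       constraints: { model: /personal_snippet|user|abuse_report/ }
  post ':model/authorize', to: 'uploads#authorize',
       constraints: { model: /personal_snippet|user|abuse_report/ }

  # note attachments addressed by secret
  get '-/system/abuse_report/:id/:secret/:filename',
      to: 'uploads#show', defaults: { model: 'abuse_report' },
      constraints: { filename: %r{[^/]+} }

  # mounted uploads such as screenshots and alert metric images
  get '-/system/:model/:mounted_as/:id/:filename',
      to: 'uploads#show',
      constraints: { model: /abuse_report|alert_management_metric_image/,
                     mounted_as: /screenshot|file/,
                     filename: %r{[^/]+} }
end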

View File

@ -7,8 +7,8 @@ require 'fast_spec_helper'
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47008
require 'rubocop'
require 'rubocop/rspec/shared_contexts/default_rspec_language_config_context'
require 'gitlab/rspec/next_instance_of'
require_relative 'support/helpers/next_instance_of'
require_relative 'rubocop/support_workaround'
RSpec.configure do |config|

View File

@ -1,8 +1,9 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'gitlab/rspec/next_instance_of'
require_relative '../../../../scripts/lib/glfm/update_specification'
require_relative '../../../support/helpers/next_instance_of'
# IMPORTANT NOTE: See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#update-specificationrb-script
# for details on the implementation and usage of the `update_specification.rb` script being tested.

View File

@ -21,7 +21,8 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
it 'exposes correct attributes' do
expect(entity_hash.keys).to match_array([
:user,
:report
:report,
:upload_note_attachment_path
])
end

View File

@ -11,7 +11,8 @@ RSpec.describe Admin::AbuseReportDetailsSerializer, feature_category: :insider_t
it 'serializes an abuse report' do
is_expected.to match_array([
:user,
:report
:report,
:upload_note_attachment_path
])
end
end

View File

@ -4,11 +4,12 @@ require 'spec_helper'
RSpec.describe Auth::DependencyProxyAuthenticationService, feature_category: :dependency_proxy do
let_it_be(:user) { create(:user) }
let_it_be(:params) { {} }
let(:service) { described_class.new(nil, user) }
let(:service) { described_class.new(nil, user, params) }
before do
stub_config(dependency_proxy: { enabled: true })
stub_config(dependency_proxy: { enabled: true }, registry: { enabled: true })
end
describe '#execute' do
@ -21,9 +22,13 @@ RSpec.describe Auth::DependencyProxyAuthenticationService, feature_category: :de
end
end
shared_examples 'returning a token' do
it 'returns a token' do
expect(subject[:token]).not_to be_nil
shared_examples 'returning a token with an encoded field' do |field|
it 'returns a token with encoded field' do
token = subject[:token]
expect(token).not_to be_nil
decoded_token = decode(token)
expect(decoded_token[field]).not_to be_nil
end
end
@ -41,14 +46,31 @@ RSpec.describe Auth::DependencyProxyAuthenticationService, feature_category: :de
it_behaves_like 'returning', status: 403, message: 'access forbidden'
end
context 'with a deploy token as user' do
let_it_be(:user) { create(:deploy_token, :group, :dependency_proxy_scopes) }
context 'with a deploy token' do
let_it_be(:deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
let_it_be(:params) { { deploy_token: deploy_token } }
it_behaves_like 'returning a token'
it_behaves_like 'returning a token with an encoded field', 'deploy_token'
end
context 'with a user' do
it_behaves_like 'returning a token'
context 'with a human user' do
it_behaves_like 'returning a token with an encoded field', 'user_id'
end
context 'all other user types' do
User::USER_TYPES.except(:human, :project_bot).each_value do |user_type|
context "with user_type #{user_type}" do
before do
user.update!(user_type: user_type)
end
it_behaves_like 'returning a token with an encoded field', 'user_id'
end
end
end
def decode(token)
DependencyProxy::AuthTokenService.new(token).execute
end
end
end
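
The shared example asserts that the issued JWT carries either a user_id or a deploy_token, which DependencyProxy::AuthTokenService can decode again. The payload selection below mirrors that behaviour; the construction itself is an assumption, and only the two field names come from the examples.

def authorized_token_payload(user:, deploy_token: nil)
  return { 'deploy_token' => deploy_token.token } if deploy_token

  { 'user_id' => user.id }
end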

View File

@ -30,7 +30,7 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
end
context 'when the release is for a catalog resource' do
let!(:catalog_resource) { create(:ci_catalog_resource, project: project, state: 'published') }
let!(:catalog_resource) { create(:ci_catalog_resource, :published, project: project) }
let!(:version) { create(:ci_catalog_resource_version, catalog_resource: catalog_resource, release: release) }
it 'does not update the catalog resources if there are still releases' do

View File

@ -1,6 +1,6 @@
# frozen_string_literal: true
require_relative './next_instance_of'
require 'gitlab/rspec/next_instance_of'
module AfterNextHelpers
class DeferredExpectation

View File

@ -4,9 +4,9 @@
require 'fast_spec_helper'
require 'tmpdir'
require 'fileutils'
require 'gitlab/rspec/next_instance_of'
require_relative '../support/silence_stdout'
require_relative '../support/helpers/next_instance_of'
require_relative '../support/matchers/abort_matcher'
require_relative '../../rubocop/formatter/todo_formatter'
require_relative '../../rubocop/todo_dir'

Some files were not shown because too many files have changed in this diff.