Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-02-09 18:07:44 +00:00
parent 608d6aaa3d
commit 453634293e
102 changed files with 1315 additions and 388 deletions

View File

@ -0,0 +1,3 @@
import { initSavedReplies } from '~/saved_replies';
initSavedReplies();

View File

@ -0,0 +1,23 @@
<script>
export default {};
</script>
<template>
<div class="row gl-mt-5">
<div class="col-lg-4">
<h4 class="gl-mt-0">
{{ __('Saved Replies') }}
</h4>
<p>
{{
__(
'Saved replies can be used when creating comments inside issues, merge requests, and epics.',
)
}}
</p>
</div>
<div class="col-lg-8">
<router-view />
</div>
</div>
</template>

View File

@ -0,0 +1,57 @@
<script>
import { GlKeysetPagination, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import savedRepliesQuery from '../queries/saved_replies.query.graphql';
import ListItem from './list_item.vue';
export default {
apollo: {
savedReplies: {
query: savedRepliesQuery,
update: (r) => r.currentUser?.savedReplies?.nodes,
result({ data }) {
const pageInfo = data.currentUser?.savedReplies?.pageInfo;
this.count = data.currentUser?.savedReplies?.count;
if (pageInfo) {
this.pageInfo = pageInfo;
}
},
},
},
components: {
GlLoadingIcon,
GlKeysetPagination,
GlSprintf,
ListItem,
},
data() {
return {
savedReplies: [],
count: 0,
pageInfo: {},
};
},
};
</script>
<template>
<div>
<gl-loading-icon v-if="$apollo.queries.savedReplies.loading" size="lg" />
<template v-else>
<h5 class="gl-font-lg" data-testid="title">
<gl-sprintf :message="__('My saved replies (%{count})')">
<template #count>{{ count }}</template>
</gl-sprintf>
</h5>
<ul class="gl-list-style-none gl-p-0 gl-m-0">
<list-item v-for="reply in savedReplies" :key="reply.id" :reply="reply" />
</ul>
<gl-keyset-pagination
v-if="pageInfo.hasPreviousPage || pageInfo.hasNextPage"
v-bind="pageInfo"
class="gl-mt-4"
/>
</template>
</div>
</template>

View File

@ -0,0 +1,19 @@
<script>
export default {
props: {
reply: {
type: Object,
required: true,
},
},
};
</script>
<template>
<li class="gl-mb-5">
<div class="gl-display-flex gl-align-items-center">
<strong>{{ reply.name }}</strong>
</div>
<div class="gl-mt-3 gl-font-monospace">{{ reply.content }}</div>
</li>
</template>

View File

@ -0,0 +1,31 @@
import Vue from 'vue';
import VueRouter from 'vue-router';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import routes from './routes';
import App from './components/app.vue';
export const initSavedReplies = () => {
Vue.use(VueApollo);
Vue.use(VueRouter);
const el = document.getElementById('js-saved-replies-root');
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
const router = new VueRouter({
base: el.dataset.basePath,
mode: 'history',
routes,
});
// eslint-disable-next-line no-new
new Vue({
el,
router,
apolloProvider,
render(h) {
return h(App);
},
});
};

View File

@ -0,0 +1,15 @@
<script>
import List from '../components/list.vue';
export default {
components: {
List,
},
};
</script>
<template>
<div>
<list />
</div>
</template>

View File

@ -0,0 +1,19 @@
query savedReplies {
currentUser {
id
savedReplies {
nodes {
id
name
content
}
count
pageInfo {
hasNextPage
hasPreviousPage
endCursor
startCursor
}
}
}
}

View File

@ -0,0 +1,8 @@
import IndexComponent from './pages/index.vue';
export default [
{
path: '/',
component: IndexComponent,
},
];

View File

@ -140,6 +140,7 @@ export default {
:note="note"
:discussion-id="discussionId"
@startReplying="showReplyForm"
@deleteNote="$emit('deleteNote', note)"
/>
<discussion-notes-replies-wrapper>
<toggle-replies-widget
@ -155,6 +156,7 @@ export default {
discussion-id="discussionId"
:note="reply"
@startReplying="showReplyForm"
@deleteNote="$emit('deleteNote', reply)"
/>
</template>
<work-item-note-replying v-if="isReplying" :body="replyingText" />

View File

@ -1,5 +1,6 @@
<script>
import { GlAvatarLink, GlAvatar } from '@gitlab/ui';
import { GlAvatarLink, GlAvatar, GlDropdown, GlDropdownItem, GlTooltipDirective } from '@gitlab/ui';
import { __ } from '~/locale';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import NoteBody from '~/work_items/components/notes/work_item_note_body.vue';
import NoteHeader from '~/notes/components/note_header.vue';
@ -8,6 +9,10 @@ import { renderGFM } from '~/behaviors/markdown/render_gfm';
export default {
name: 'WorkItemNoteThread',
i18n: {
moreActionsText: __('More actions'),
deleteNoteText: __('Delete comment'),
},
components: {
TimelineEntryItem,
NoteBody,
@ -15,6 +20,11 @@ export default {
NoteActions,
GlAvatar,
GlAvatarLink,
GlDropdown,
GlDropdownItem,
},
directives: {
GlTooltip: GlTooltipDirective,
},
props: {
note: {
@ -68,6 +78,26 @@ export default {
<div class="note-header">
<note-header :author="author" :created-at="note.createdAt" :note-id="note.id" />
<note-actions :show-reply="showReply" @startReplying="showReplyForm" />
<!-- v-if condition should be moved to "delete" dropdown item as soon as we implement copying the link -->
<gl-dropdown
v-if="note.userPermissions.adminNote"
v-gl-tooltip
icon="ellipsis_v"
text-sr-only
right
:text="$options.i18n.moreActionsText"
:title="$options.i18n.moreActionsText"
category="tertiary"
no-caret
>
<gl-dropdown-item
variant="danger"
data-testid="delete-note-action"
@click="$emit('deleteNote')"
>
{{ $options.i18n.deleteNoteText }}
</gl-dropdown-item>
</gl-dropdown>
</div>
<div class="timeline-discussion-body">
<note-body ref="noteBody" :note="note" />

View File

@ -11,7 +11,7 @@ import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { updateCommentState } from '~/work_items/graphql/cache_utils';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
import { getWorkItemQuery } from '../utils';
import createNoteMutation from '../graphql/create_work_item_note.mutation.graphql';
import createNoteMutation from '../graphql/notes/create_work_item_note.mutation.graphql';
import { TRACKING_CATEGORY_SHOW, i18n } from '../constants';
import WorkItemNoteSignedOut from './work_item_note_signed_out.vue';
import WorkItemCommentLocked from './work_item_comment_locked.vue';

View File

@ -1,12 +1,15 @@
<script>
import { GlSkeletonLoader } from '@gitlab/ui';
import { s__ } from '~/locale';
import { GlSkeletonLoader, GlModal } from '@gitlab/ui';
import * as Sentry from '@sentry/browser';
import { s__, __ } from '~/locale';
import { TYPENAME_DISCUSSION, TYPENAME_NOTE } from '~/graphql_shared/constants';
import SystemNote from '~/work_items/components/notes/system_note.vue';
import ActivityFilter from '~/work_items/components/notes/activity_filter.vue';
import { i18n, DEFAULT_PAGE_SIZE_NOTES } from '~/work_items/constants';
import { ASC, DESC } from '~/notes/constants';
import { getWorkItemNotesQuery } from '~/work_items/utils';
import WorkItemDiscussion from '~/work_items/components/notes/work_item_discussion.vue';
import deleteNoteMutation from '../graphql/notes/delete_work_item_notes.mutation.graphql';
import WorkItemCommentForm from './work_item_comment_form.vue';
export default {
@ -20,6 +23,7 @@ export default {
},
components: {
GlSkeletonLoader,
GlModal,
ActivityFilter,
SystemNote,
WorkItemCommentForm,
@ -53,6 +57,7 @@ export default {
isLoadingMore: false,
perPage: DEFAULT_PAGE_SIZE_NOTES,
sortOrder: ASC,
noteToDelete: null,
};
},
computed: {
@ -173,6 +178,45 @@ export default {
.catch((error) => this.$emit('error', error.message));
this.isLoadingMore = false;
},
showDeleteNoteModal(note, discussion) {
const isLastNote = discussion.notes.nodes.length === 1;
this.$refs.deleteNoteModal.show();
this.noteToDelete = { ...note, isLastNote };
},
cancelDeletingNote() {
this.noteToDelete = null;
},
async deleteNote() {
try {
const { id, isLastNote, discussion } = this.noteToDelete;
await this.$apollo.mutate({
mutation: deleteNoteMutation,
variables: {
input: {
id,
},
},
update(cache) {
const deletedObject = isLastNote
? { __typename: TYPENAME_DISCUSSION, id: discussion.id }
: { __typename: TYPENAME_NOTE, id };
cache.modify({
id: cache.identify(deletedObject),
fields: (_, { DELETE }) => DELETE,
});
},
optimisticResponse: {
destroyNote: {
note: null,
__typename: 'DestroyNotePayload',
},
},
});
} catch (error) {
this.$emit('error', __('Something went wrong when deleting a comment. Please try again'));
Sentry.captureException(error);
}
},
},
};
</script>
@ -226,6 +270,7 @@ export default {
:work-item-id="workItemId"
:fetch-by-iid="fetchByIid"
:work-item-type="workItemType"
@deleteNote="showDeleteNoteModal($event, discussion)"
/>
</template>
</template>
@ -251,5 +296,17 @@ export default {
</gl-skeleton-loader>
</template>
</div>
<gl-modal
ref="deleteNoteModal"
modal-id="delete-note-modal"
:title="__('Delete comment?')"
:ok-title="__('Delete comment')"
ok-variant="danger"
size="sm"
@primary="deleteNote"
@canceled="cancelDeletingNote"
>
{{ __('Are you sure you want to delete this comment?') }}
</gl-modal>
</div>
</template>

View File

@ -1,4 +1,4 @@
#import "~/work_items/graphql/work_item_note.fragment.graphql"
#import "./work_item_note.fragment.graphql"
mutation createWorkItemNote($input: CreateNoteInput!) {
createNote(input: $input) {

View File

@ -0,0 +1,7 @@
mutation deleteWorkItemNote($input: DestroyNoteInput!) {
destroyNote(input: $input) {
note {
id
}
}
}

View File

@ -1,5 +1,5 @@
#import "~/graphql_shared/fragments/user.fragment.graphql"
#import "~/work_items/graphql/work_item_note.fragment.graphql"
#import "./work_item_note.fragment.graphql"
fragment WorkItemDiscussionNote on Note {
id

View File

@ -1,4 +1,4 @@
#import "~/work_items/graphql/work_item_discussion_note.fragment.graphql"
#import "./work_item_discussion_note.fragment.graphql"
subscription workItemNoteCreated($noteableId: NoteableID) {
workItemNoteCreated(noteableId: $noteableId) {

View File

@ -1,4 +1,4 @@
#import "~/work_items/graphql/work_item_note.fragment.graphql"
#import "./work_item_note.fragment.graphql"
subscription workItemNoteUpdated($noteableId: NoteableID) {
workItemNoteUpdated(noteableId: $noteableId) {

View File

@ -1,5 +1,5 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
#import "~/work_items/graphql/work_item_note.fragment.graphql"
#import "./work_item_note.fragment.graphql"
query workItemNotes($id: WorkItemID!, $after: String, $pageSize: Int) {
workItem(id: $id) {

View File

@ -1,5 +1,5 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
#import "~/work_items/graphql/work_item_note.fragment.graphql"
#import "./work_item_note.fragment.graphql"
query workItemNotesByIid($fullPath: ID!, $iid: String, $after: String, $pageSize: Int) {
workspace: project(fullPath: $fullPath) {

View File

@ -1,8 +1,8 @@
import { WIDGET_TYPE_HIERARCHY } from '~/work_items/constants';
import workItemQuery from './graphql/work_item.query.graphql';
import workItemByIidQuery from './graphql/work_item_by_iid.query.graphql';
import workItemNotesIdQuery from './graphql/work_item_notes.query.graphql';
import workItemNotesByIidQuery from './graphql/work_item_notes_by_iid.query.graphql';
import workItemNotesIdQuery from './graphql/notes/work_item_notes.query.graphql';
import workItemNotesByIidQuery from './graphql/notes/work_item_notes_by_iid.query.graphql';
export function getWorkItemQuery(isFetchedByIid) {
return isFetchedByIid ? workItemByIidQuery : workItemQuery;

View File

@ -637,6 +637,11 @@ $system-note-svg-size: 1rem;
&.new {
border-right-width: 0;
}
.note-header {
flex-wrap: wrap;
align-items: center;
}
}
.notes {

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module Profiles
class SavedRepliesController < Profiles::ApplicationController
feature_category :user_profile
before_action do
render_404 unless Feature.enabled?(:saved_replies, current_user)
@hide_search_settings = true
end
end
end
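The controller 404s unless the `saved_replies` feature flag is enabled for the current user. A minimal sketch of turning the page on for one user from a Rails console, assuming the flag is toggled per user actor (the user lookup below is illustrative, not part of this diff):

# Hypothetical console session; only Feature.enable and the flag name come from this change.
user = User.find_by(username: 'example')
Feature.enable(:saved_replies, user)
# profile_saved_replies_path (routed further down in this commit) now renders instead of 404.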

View File

@ -12,7 +12,8 @@ class Projects::NotesController < Projects::ApplicationController
before_action :authorize_resolve_note!, only: [:resolve, :unresolve]
feature_category :team_planning
urgency :low
urgency :medium, [:index]
urgency :low, [:create, :update, :destroy, :resolve, :unresolve, :toggle_award_emoji, :outdated_line_change]
def delete_attachment
note.remove_attachment!

View File

@ -24,9 +24,9 @@ module Mutations
description: 'Direction of access, which defaults to outbound.'
field :ci_job_token_scope,
Types::Ci::JobTokenScopeType,
null: true,
description: "CI job token's scope of access."
Types::Ci::JobTokenScopeType,
null: true,
description: "CI job token's scope of access."
def resolve(project_path:, target_project_path:, direction: :outbound)
project = authorized_find!(project_path)
@ -34,7 +34,7 @@ module Mutations
result = ::Ci::JobTokenScope::RemoveProjectService
.new(project, current_user)
.execute(target_project, direction: direction)
.execute(target_project, direction)
if result.success?
{

View File

@ -300,6 +300,10 @@ module UsersHelper
other: s_('User|Other')
}.with_indifferent_access.freeze
end
def saved_replies_enabled?
Feature.enabled?(:saved_replies, current_user)
end
end
UsersHelper.prepend_mod_with('UsersHelper')

View File

@ -24,10 +24,28 @@ module Taskable
(\s.+) # followed by whitespace and some text.
}x.freeze
# ignore tasks in code or html comment blocks. HTML blocks
# are ok as we allow tasks inside <details> blocks
REGEX = %r{
#{::Gitlab::Regex.markdown_code_or_html_comment_blocks}
|
(?<task_item>
#{ITEM_PATTERN}
)
}mx.freeze
def self.get_tasks(content)
content.to_s.scan(ITEM_PATTERN).map do |prefix, checkbox, label|
TaskList::Item.new("#{prefix} #{checkbox}", label.strip)
items = []
content.to_s.scan(REGEX) do
next unless $~[:task_item]
$~[:task_item].scan(ITEM_PATTERN) do |prefix, checkbox, label|
items << TaskList::Item.new("#{prefix.strip} #{checkbox}", label.strip)
end
end
items
end
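With the combined REGEX above, `get_tasks` consumes fenced code blocks and HTML comment blocks first and only turns `task_item` captures found outside of them into checklist items. A minimal sketch of that behaviour with a hypothetical markdown string (it mirrors the feature spec added later in this commit):

markdown = <<~MD
  ```
  - [ ] ignored, because it sits inside a code block
  ```
  - [x] counted, because it sits outside the block
MD

# Only the item outside the code fence becomes a TaskList::Item.
Taskable.get_tasks(markdown).size # => 1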
def self.get_updated_tasks(old_content:, new_content:)
@ -67,10 +85,10 @@ module Taskable
checklist_item_noun = n_('checklist item', 'checklist items', sum.item_count)
if short
format(s_('Tasks|%{complete_count}/%{total_count} %{checklist_item_noun}'),
checklist_item_noun: checklist_item_noun, complete_count: sum.complete_count, total_count: sum.item_count)
checklist_item_noun: checklist_item_noun, complete_count: sum.complete_count, total_count: sum.item_count)
else
format(s_('Tasks|%{complete_count} of %{total_count} %{checklist_item_noun} completed'),
checklist_item_noun: checklist_item_noun, complete_count: sum.complete_count, total_count: sum.item_count)
checklist_item_noun: checklist_item_noun, complete_count: sum.complete_count, total_count: sum.item_count)
end
end

View File

@ -11,3 +11,5 @@ class Namespace::Detail < ApplicationRecord
self.primary_key = :namespace_id
end
Namespace::Detail.prepend_mod

View File

@ -17,7 +17,7 @@ module Projects
delegator_override :project_members
def project_members
super + converted_group_members
super.preload(:user) + converted_group_members # rubocop:disable CodeReuse/ActiveRecord
end
delegator_override :description
@ -46,7 +46,7 @@ module Projects
# invitee, it would make the following query return 0 rows since a NULL
# user_id would be present in the subquery
non_null_user_ids = project.project_members.connected_to_user.select(:user_id)
GroupMembersFinder.new(project.group).execute.where.not(user_id: non_null_user_ids)
GroupMembersFinder.new(project.group).execute.where.not(user_id: non_null_user_ids).preload(:user)
end
# rubocop: enable CodeReuse/ActiveRecord
end

View File

@ -15,4 +15,6 @@ class CodequalityDegradationEntity < Grape::Entity
end
expose :web_url
expose :engine_name
end

View File

@ -5,7 +5,7 @@ module Ci
class RemoveProjectService < ::BaseService
include EditScopeValidations
def execute(target_project, direction: :outbound)
def execute(target_project, direction)
validate_edit!(project, target_project, current_user)
if project == target_project
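The `direction` argument is now positional instead of a keyword argument, which is what the updated mutation resolver and specs elsewhere in this commit pass. A minimal sketch of the call-site change (the project and user objects are assumed to exist):

service = ::Ci::JobTokenScope::RemoveProjectService.new(project, current_user)

# Before this change: service.execute(target_project, direction: :inbound)
service.execute(target_project, :inbound)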

View File

@ -130,6 +130,18 @@
= link_to profile_preferences_path do
%strong.fly-out-top-item-name
= _('Preferences')
- if saved_replies_enabled?
= nav_link(controller: :saved_replies) do
= link_to profile_saved_replies_path do
.nav-icon-container
= sprite_icon('symlink')
%span.nav-item-name
= _('Saved Replies')
%ul.sidebar-sub-level-items.is-fly-out-only
= nav_link(controller: :saved_replies, html_options: { class: "fly-out-top-item" }) do
= link_to profile_saved_replies_path do
%strong.fly-out-top-item-name
= _('Saved Replies')
= nav_link(controller: :active_sessions) do
= link_to profile_active_sessions_path do
.nav-icon-container

View File

@ -0,0 +1,10 @@
- page_title _('Saved Replies')
#js-saved-replies-root.row.gl-mt-5{ data: { base_path: profile_saved_replies_path } }
.col-lg-4
%h4.gl-mt-0
= page_title
%p
= _('Saved replies can be used when creating comments inside issues, merge requests, and epics.')
.col-lg-8
= gl_loading_icon(size: 'lg')

View File

@ -0,0 +1,8 @@
---
name: free_user_cap_over_user_limit_mails
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98438
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/378616
milestone: '15.9'
type: development
group: group::acquisition
default_enabled: false

View File

@ -1,7 +1,7 @@
---
name: limited_capacity_seat_refresh_worker_high
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104099/
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/382725"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104099
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/382725
milestone: '15.9'
type: development
group: group::utilization

View File

@ -1,7 +1,7 @@
---
name: limited_capacity_seat_refresh_worker_low
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104099/
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/382725"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104099
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/382725
milestone: '15.9'
type: development
group: group::utilization

View File

@ -1,7 +1,7 @@
---
name: limited_capacity_seat_refresh_worker_medium
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104099/
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/382725"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104099
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/382725
milestone: '15.9'
type: development
group: group::utilization

View File

@ -1,8 +0,0 @@
---
name: use_primary_and_secondary_stores_for_repository_cache
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107232#note_1216317991
rollout_issue_url:
milestone: '15.7'
type: development
group: group::scalability
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: use_primary_store_as_default_for_repository_cache
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107232#note_1216317991
rollout_issue_url:
milestone: '15.7'
type: development
group: group::scalability
default_enabled: false

View File

@ -39,6 +39,8 @@ resource :profile, only: [:show, :update] do
end
resource :preferences, only: [:show, :update]
resources :saved_replies, only: [:index], action: :index
resources :keys, only: [:index, :show, :create, :destroy] do
member do
delete :revoke

View File

@ -337,6 +337,8 @@
- 1
- - migrate_external_diffs
- 1
- - namespaces_free_user_cap_over_limit_notification
- 1
- - namespaces_process_sync_events
- 1
- - namespaces_sync_namespace_name

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class AddNextOverLimitCheckAtToNamespaceDetails < Gitlab::Database::Migration[2.1]
disable_ddl_transaction!
TABLE_NAME = :namespace_details
COLUMN = :next_over_limit_check_at
def up
with_lock_retries do
add_column TABLE_NAME, COLUMN, :datetime_with_timezone, null: true
end
end
def down
with_lock_retries do
remove_column TABLE_NAME, COLUMN
end
end
end

View File

@ -0,0 +1 @@
400cab0a2d3130dd7406024cf982c7312918019197ae06af06696435f6bb5aaa

View File

@ -18310,7 +18310,8 @@ CREATE TABLE namespace_details (
free_user_cap_over_limt_notified_at timestamp with time zone,
free_user_cap_over_limit_notified_at timestamp with time zone,
dashboard_notification_at timestamp with time zone,
dashboard_enforcement_at timestamp with time zone
dashboard_enforcement_at timestamp with time zone,
next_over_limit_check_at timestamp with time zone
);
CREATE TABLE namespace_limits (

View File

@ -11666,6 +11666,7 @@ Represents a code quality degradation on the pipeline.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="codequalitydegradationdescription"></a>`description` | [`String!`](#string) | Description of the code quality degradation. |
| <a id="codequalitydegradationenginename"></a>`engineName` | [`String!`](#string) | Code Quality plugin that reported the finding. |
| <a id="codequalitydegradationfingerprint"></a>`fingerprint` | [`String!`](#string) | Unique fingerprint to identify the code quality degradation. For example, an MD5 hash. |
| <a id="codequalitydegradationline"></a>`line` | [`Int!`](#int) | Line on which the code quality degradation occurred. |
| <a id="codequalitydegradationpath"></a>`path` | [`String!`](#string) | Relative path to the file containing the code quality degradation. |

View File

@ -178,6 +178,8 @@ Example response:
## List group epic board lists
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/385904) in GitLab 15.9.
Gets a list of the epic board's lists.
Does not include `open` and `closed` lists.
@ -236,6 +238,8 @@ Example response:
## Single group epic board list
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/385904) in GitLab 15.9.
Gets a single board list.
```plaintext

View File

@ -1,18 +1,11 @@
---
stage: none
group: Development
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
redirect_to: 'index.md'
remove_date: '2023-05-08'
---
# Community members & roles
This document was moved to [another location](index.md).
GitLab community members and their privileges/responsibilities.
| Roles | Responsibilities | Requirements |
|-------|------------------|--------------|
| Maintainer | Accepts merge requests on several GitLab projects | Added to the [team page](https://about.gitlab.com/company/team/). An expert on code reviews and knows the product/codebase |
| Reviewer | Performs code reviews on MRs | Added to the [team page](https://about.gitlab.com/company/team/) |
| Developer | Has access to GitLab internal infrastructure & issues (for example, HR-related) | GitLab employee or a Core Team member (with an NDA) |
| Contributor | Can make contributions to all GitLab public projects | Have a GitLab.com account |
[List of current reviewers/maintainers](https://about.gitlab.com/handbook/engineering/projects/#gitlab).
<!-- This redirect file can be deleted after <2023-05-08>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View File

@ -23,9 +23,6 @@ GitLab comes in two flavors:
Throughout this guide you will see references to CE and EE for abbreviation.
To get an overview of GitLab community membership, including those that would review or merge
your contributions, visit [the community roles page](community_roles.md).
## Code of conduct
We want to create a welcoming environment for everyone who is interested in contributing.

View File

@ -588,7 +588,7 @@ The following are some available Rake tasks:
| Task | Description |
|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| [`sudo gitlab-rake gitlab:elastic:info`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Outputs debugging information for the Advanced Search integration. |
| [`sudo gitlab-rake gitlab:elastic:index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Enables Elasticsearch indexing and runs `gitlab:elastic:create_empty_index`, `gitlab:elastic:clear_index_status`, `gitlab:elastic:index_projects`, and `gitlab:elastic:index_snippets`. |
| [`sudo gitlab-rake gitlab:elastic:index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Enables Elasticsearch indexing and runs `gitlab:elastic:create_empty_index`, `gitlab:elastic:clear_index_status`, `gitlab:elastic:index_projects`, `gitlab:elastic:index_snippets`, and `gitlab:elastic:index_users`. |
| [`sudo gitlab-rake gitlab:elastic:pause_indexing`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Pauses Elasticsearch indexing. Changes are still tracked. Useful for cluster/index migrations. |
| [`sudo gitlab-rake gitlab:elastic:resume_indexing`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Resumes Elasticsearch indexing. |
| [`sudo gitlab-rake gitlab:elastic:index_projects`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Iterates over all projects, and queues Sidekiq jobs to index them in the background. It can only be used after the index is created. |
@ -598,6 +598,7 @@ The following are some available Rake tasks:
| [`sudo gitlab-rake gitlab:elastic:delete_index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab indices and aliases (if they exist) on the Elasticsearch instance. |
| [`sudo gitlab-rake gitlab:elastic:recreate_index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index` and `gitlab:elastic:create_empty_index`. |
| [`sudo gitlab-rake gitlab:elastic:index_snippets`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Performs an Elasticsearch import that indexes the snippets data. |
| [`sudo gitlab-rake gitlab:elastic:index_users`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Imports all users into Elasticsearch. |
| [`sudo gitlab-rake gitlab:elastic:projects_not_indexed`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Displays which projects are not indexed. |
| [`sudo gitlab-rake gitlab:elastic:reindex_cluster`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Schedules a zero-downtime cluster reindexing task. This feature should be used with an index that was created after GitLab 13.0. |
| [`sudo gitlab-rake gitlab:elastic:mark_reindex_failed`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Mark the most recent re-index job as failed. |
@ -647,6 +648,7 @@ When performing a search, the GitLab index uses the following scopes:
| `notes` | Note data |
| `snippets` | Snippet data |
| `wiki_blobs` | Wiki contents |
| `users` | Users |
## Tuning

View File

@ -69,8 +69,8 @@ To simplify administration, we recommend that a GitLab group maintainer or group
| Jira usage | GitLab.com customers need | GitLab self-managed customers need |
|------------|---------------------------|------------------------------------|
| [Atlassian cloud](https://www.atlassian.com/migration/assess/why-cloud) | The [GitLab for Jira Cloud app](https://marketplace.atlassian.com/apps/1221011/gitlab-com-for-jira-cloud?hosting=cloud&tab=overview) from the [Atlassian Marketplace](https://marketplace.atlassian.com). This method offers real-time sync between GitLab.com and Jira. For more information, see [GitLab for Jira Cloud app](connect-app.md). | The GitLab for Jira Cloud app [installed manually](connect-app.md#install-the-gitlab-for-jira-cloud-app-manually). By default, you can install the app from the [Atlassian Marketplace](https://marketplace.atlassian.com/). For more information, see [Connect the GitLab for Jira Cloud app for self-managed instances](connect-app.md#connect-the-gitlab-for-jira-cloud-app-for-self-managed-instances). |
| Your own server | The [Jira DVCS (distributed version control system) connector](dvcs/index.md). This syncs data hourly. | The [Jira DVCS (distributed version control system) connector](dvcs/index.md). This syncs data hourly. |
| [Atlassian cloud](https://www.atlassian.com/migration/assess/why-cloud) | The [GitLab for Jira Cloud app](https://marketplace.atlassian.com/apps/1221011/gitlab-com-for-jira-cloud?hosting=cloud&tab=overview) from the [Atlassian Marketplace](https://marketplace.atlassian.com). This method offers real-time sync between GitLab.com and Jira. The method requires inbound connections for the setup and then pushes data to Jira through outbound connections. For more information, see [GitLab for Jira Cloud app](connect-app.md). | The GitLab for Jira Cloud app [installed manually](connect-app.md#install-the-gitlab-for-jira-cloud-app-manually). By default, you can install the app from the [Atlassian Marketplace](https://marketplace.atlassian.com/). The method requires inbound connections for the setup and then pushes data to Jira through outbound connections. For more information, see [Connect the GitLab for Jira Cloud app for self-managed instances](connect-app.md#connect-the-gitlab-for-jira-cloud-app-for-self-managed-instances). |
| Your own server | The [Jira DVCS connector](dvcs/index.md). This method syncs data every hour and works only with inbound connections. The method tries to set up webhooks in GitLab to implement real-time data sync, which does not work without outbound connections. | The [Jira DVCS connector](dvcs/index.md). This method syncs data every hour and works only with inbound connections. The method tries to set up webhooks in GitLab to implement real-time data sync, which does not work without outbound connections. |
Each GitLab project can be configured to connect to an entire Jira instance. That means after
configuration, one GitLab project can interact with all Jira projects in that instance. For:

View File

@ -6,11 +6,11 @@ module API
include ::API::Helpers::RelatedResourcesHelpers
expose(:has_tasks) do |issue, _|
!issue.task_list_items.empty?
!issue.tasks?
end
expose :task_status, if: -> (issue, _) do
!issue.task_list_items.empty?
!issue.tasks?
end
expose :_links do

View File

@ -424,4 +424,5 @@ module Feature
end
end
Feature.prepend_mod
Feature::ActiveSupportCacheStoreAdapter.prepend_mod_with('Feature::ActiveSupportCacheStoreAdapter')

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true
module Gitlab
module Database
module SchemaValidation
class Database
def initialize(connection)
@connection = connection
end
def fetch_index_by_name(index_name)
index_map[index_name]
end
def indexes
index_map.values
end
private
def index_map
@index_map ||=
fetch_indexes.transform_values! do |index_stmt|
Index.new(PgQuery.parse(index_stmt).tree.stmts.first.stmt.index_stmt)
end
end
attr_reader :connection
def fetch_indexes
sql = <<~SQL
SELECT indexname, indexdef
FROM pg_indexes
WHERE indexname NOT LIKE '%_pkey' AND schemaname IN ('public', 'gitlab_partitions_static');
SQL
@fetch_indexes ||= connection.exec_query(sql).rows.to_h
end
end
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
module Gitlab
module Database
module SchemaValidation
class Index
def initialize(parsed_stmt)
@parsed_stmt = parsed_stmt
end
def name
parsed_stmt.idxname
end
def statement
@statement ||= PgQuery.deparse_stmt(parsed_stmt)
end
private
attr_reader :parsed_stmt
end
end
end
end

View File

@ -4,68 +4,33 @@ module Gitlab
module Database
module SchemaValidation
class Indexes
def initialize(structure_file_path, database_name)
@parsed_structure_file = PgQuery.parse(File.read(structure_file_path))
@database_name = database_name
def initialize(structure_sql, database)
@structure_sql = structure_sql
@database = database
end
def missing_indexes
structure_file_indexes.keys - database_indexes.keys
structure_sql.indexes.map(&:name) - database.indexes.map(&:name)
end
def extra_indexes
database_indexes.keys - structure_file_indexes.keys
database.indexes.map(&:name) - structure_sql.indexes.map(&:name)
end
def wrong_indexes
structure_file_indexes.filter_map do |index_name, index_stmt|
database_index = database_indexes[index_name]
structure_sql.indexes.filter_map do |structure_sql_index|
database_index = database.fetch_index_by_name(structure_sql_index.name)
next if database_index.nil?
next if database_index.statement == structure_sql_index.statement
begin
database_index = PgQuery.deparse_stmt(PgQuery.parse(database_index).tree.stmts.first.stmt.index_stmt)
index_stmt.relation.schemaname = "public" if index_stmt.relation.schemaname == ''
structure_sql_index = PgQuery.deparse_stmt(index_stmt)
index_name unless database_index == structure_sql_index
rescue PgQuery::ParseError
index_name
end
structure_sql_index.name
end
end
private
attr_reader :parsed_structure_file, :database_name
def structure_file_indexes
@structure_file_indexes ||= index_parsed_structure_file.each_with_object({}) do |tree, dic|
index_stmt = tree.stmt.index_stmt
dic[index_stmt.idxname] = index_stmt
end
end
def index_parsed_structure_file
@index_parsed_structure_file ||= parsed_structure_file.tree.stmts.reject { |s| s.stmt.index_stmt.nil? }
end
def database_indexes
sql = <<~SQL
SELECT indexname, indexdef
FROM pg_indexes
WHERE indexname NOT LIKE '%_pkey' AND schemaname IN ('public', 'gitlab_partitions_static');
SQL
@database_indexes ||= connection.exec_query(sql).rows.to_h
end
def connection
@connection ||= Gitlab::Database.database_base_models[database_name].connection
end
attr_reader :structure_sql, :database
end
end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
module Gitlab
module Database
module SchemaValidation
class StructureSql
def initialize(structure_file_path)
@structure_file_path = structure_file_path
end
def indexes
@indexes ||= index_statements.map do |index_statement|
index_statement.relation.schemaname = "public" if index_statement.relation.schemaname == ''
Index.new(index_statement)
end
end
private
attr_reader :structure_file_path
def index_statements
parsed_structure_file.tree.stmts.filter_map { |s| s.stmt.index_stmt }
end
def parsed_structure_file
PgQuery.parse(File.read(structure_file_path))
end
end
end
end
end
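After this refactor the index validation is split across three collaborators: StructureSql parses the dump, Database reads pg_indexes, and Indexes diffs the two by name and deparsed statement. A minimal usage sketch under assumed inputs (the structure file path and connection below are illustrative, not part of this diff):

structure_sql = Gitlab::Database::SchemaValidation::StructureSql.new('db/structure.sql')
database = Gitlab::Database::SchemaValidation::Database.new(ApplicationRecord.connection)

validator = Gitlab::Database::SchemaValidation::Indexes.new(structure_sql, database)
validator.missing_indexes # index names present in structure.sql but absent from the database
validator.extra_indexes   # index names present in the database but absent from structure.sql
validator.wrong_indexes   # index names whose deparsed definitions differ between the two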

View File

@ -98,6 +98,9 @@ methods:
epics:
- :state
# Add to this list the nested associations that are used to export the parent
# association but are not present in the tree list. In other words, the associations
# that need to be preloaded but do not need to be exported.
preloads:
export_reorders:

View File

@ -1105,6 +1105,9 @@ methods:
issues:
- :state
# Add to this list the nested associations that are used to export the parent
# association but are not present in the tree list. In other words, the associations
# that need to be preloaded but do not need to be exported.
preloads:
issues:
project: :route
@ -1113,8 +1116,8 @@ preloads:
# tags: # needed by tag_list
project: # deprecated: needed by coverage_regex of Ci::Build
merge_requests:
source_project: # needed by source_branch_sha and diff_head_sha
target_project: # needed by target_branch_sha
source_project: :route # needed by source_branch_sha and diff_head_sha
target_project: :route # needed by target_branch_sha
assignees: # needed by assignee_id that is implemented by DeprecatedAssignee
# Specify a custom export reordering for a given relationship

View File

@ -12,9 +12,13 @@ module Gitlab
redis: pool,
compress: Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_REDIS_CACHE_COMPRESSION', '1')),
namespace: CACHE_NAMESPACE,
expires_in: ENV.fetch('GITLAB_RAILS_CACHE_DEFAULT_TTL_SECONDS', 8.hours).to_i # Cache should not grow forever
expires_in: default_ttl_seconds
}
end
def self.default_ttl_seconds
ENV.fetch('GITLAB_RAILS_CACHE_DEFAULT_TTL_SECONDS', 8.hours).to_i
end
end
end
end
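The TTL default is now read in one place and reused by `Gitlab::Redis::RepositoryCache` in the next file. A minimal sketch of what the extracted helper returns (the environment value is hypothetical):

Gitlab::Redis::Cache.default_ttl_seconds
# => 28800 (8 hours) when GITLAB_RAILS_CACHE_DEFAULT_TTL_SECONDS is unset

ENV['GITLAB_RAILS_CACHE_DEFAULT_TTL_SECONDS'] = '3600'
Gitlab::Redis::Cache.default_ttl_seconds # => 3600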

View File

@ -14,19 +14,9 @@ module Gitlab
redis: pool,
compress: Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_REDIS_CACHE_COMPRESSION', '1')),
namespace: Cache::CACHE_NAMESPACE,
# Cache should not grow forever
expires_in: ENV.fetch('GITLAB_RAILS_CACHE_DEFAULT_TTL_SECONDS', 8.hours).to_i
expires_in: Cache.default_ttl_seconds
)
end
private
def redis
primary_store = ::Redis.new(params)
secondary_store = ::Redis.new(config_fallback.params)
MultiStore.new(primary_store, secondary_store, store_name)
end
end
end
end

View File

@ -435,30 +435,59 @@ module Gitlab
}x.freeze
end
MARKDOWN_CODE_BLOCK_REGEX = %r{
(?<code>
# Code blocks:
# ```
# Anything, including `>>>` blocks which are ignored by this filter
# ```
^```
.+?
\n```\ *$
)
}mx.freeze
MARKDOWN_HTML_BLOCK_REGEX = %r{
(?<html>
# HTML block:
# <tag>
# Anything, including `>>>` blocks which are ignored by this filter
# </tag>
^<[^>]+?>\ *\n
.+?
\n<\/[^>]+?>\ *$
)
}mx.freeze
MARKDOWN_HTML_COMMENT_BLOCK_REGEX = %r{
(?<html_block_comment>
# HTML block comment:
# <!-- some comment text
# more comment
# and more comment -->
^<!--.*?\ *\n
.+?
\n.*?-->\ *$
)
}mx.freeze
def markdown_code_or_html_blocks
@markdown_code_or_html_blocks ||= %r{
(?<code>
# Code blocks:
# ```
# Anything, including `>>>` blocks which are ignored by this filter
# ```
^```
.+?
\n```\ *$
)
#{MARKDOWN_CODE_BLOCK_REGEX}
|
(?<html>
# HTML block:
# <tag>
# Anything, including `>>>` blocks which are ignored by this filter
# </tag>
#{MARKDOWN_HTML_BLOCK_REGEX}
}mx.freeze
end
^<[^>]+?>\ *\n
.+?
\n<\/[^>]+?>\ *$
)
}mx
def markdown_code_or_html_comment_blocks
@markdown_code_or_html_comment_blocks ||= %r{
#{MARKDOWN_CODE_BLOCK_REGEX}
|
#{MARKDOWN_HTML_COMMENT_BLOCK_REGEX}
}mx.freeze
end
# Based on Jira's project key format
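A minimal sketch of how the new combined pattern behaves (the markdown string is hypothetical); the `html_block_comment` named capture comes from the constant above, and `Taskable` relies on it to skip commented-out checkboxes:

comment_markdown = <<~MD
  <!-- an HTML comment block
  - [ ] not a real task
  still inside the comment -->
  - [ ] a real task
MD

comment_markdown.scan(Gitlab::Regex.markdown_code_or_html_comment_blocks) do
  # The :html_block_comment capture holds the whole <!-- ... --> block for this match;
  # it would be nil if the fenced-code branch had matched instead.
  $~[:html_block_comment]
end
# The task outside the comment is not consumed by this pattern, so callers such as
# Taskable can still pick it up with their own task_item alternative.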

View File

@ -50,12 +50,7 @@ module Gitlab
end
def self.store
if Feature.enabled?(:use_primary_and_secondary_stores_for_repository_cache) ||
Feature.enabled?(:use_primary_store_as_default_for_repository_cache)
Gitlab::Redis::RepositoryCache.cache_store
else
Rails.cache
end
Gitlab::Redis::RepositoryCache.cache_store
end
end
end

View File

@ -140,12 +140,7 @@ module Gitlab
private
def cache
if Feature.enabled?(:use_primary_and_secondary_stores_for_repository_cache) ||
Feature.enabled?(:use_primary_store_as_default_for_repository_cache)
Gitlab::Redis::RepositoryCache
else
Gitlab::Redis::Cache
end
Gitlab::Redis::RepositoryCache
end
def with(&blk)

View File

@ -68,12 +68,7 @@ module Gitlab
private
def cache
if Feature.enabled?(:use_primary_and_secondary_stores_for_repository_cache) ||
Feature.enabled?(:use_primary_store_as_default_for_repository_cache)
Gitlab::Redis::RepositoryCache
else
Gitlab::Redis::Cache
end
Gitlab::Redis::RepositoryCache
end
def with(&blk)

View File

@ -6525,6 +6525,9 @@ msgstr ""
msgid "BillingPlans|Ultimate"
msgstr ""
msgid "BillingPlans|Upgrade"
msgstr ""
msgid "BillingPlans|Upgrade to Premium"
msgstr ""
@ -13377,6 +13380,9 @@ msgstr ""
msgid "Delete comment"
msgstr ""
msgid "Delete comment?"
msgstr ""
msgid "Delete corpus"
msgstr ""
@ -17970,7 +17976,7 @@ msgstr ""
msgid "FreeUserCap|Explore paid plans:"
msgstr ""
msgid "FreeUserCap|Looks like you've reached your limit of %{free_user_limit} members for \"%{namespace_name}\". You can't add any more, but you can manage your existing members, for example, by removing inactive members and replacing them with new members."
msgid "FreeUserCap|It looks like you've reached your limit of %{free_user_limit} members for \"%{namespace_name}\", according to the check we ran on %{date_time}. You can't add any more, but you can manage your existing members, for example, by removing inactive members and replacing them with new members."
msgstr ""
msgid "FreeUserCap|Manage members"
@ -27564,6 +27570,9 @@ msgstr ""
msgid "My company or team"
msgstr ""
msgid "My saved replies (%{count})"
msgstr ""
msgid "My topic"
msgstr ""
@ -37339,6 +37348,12 @@ msgstr ""
msgid "Save pipeline schedule"
msgstr ""
msgid "Saved Replies"
msgstr ""
msgid "Saved replies can be used when creating comments inside issues, merge requests, and epics."
msgstr ""
msgid "Saving"
msgstr ""
@ -40161,6 +40176,9 @@ msgstr ""
msgid "Something went wrong trying to load issue contacts."
msgstr ""
msgid "Something went wrong when deleting a comment. Please try again"
msgstr ""
msgid "Something went wrong when reordering designs. Please try again"
msgstr ""

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Projects::NotesController do
RSpec.describe Projects::NotesController, type: :controller, feature_category: :team_planning do
include ProjectForksHelper
let(:user) { create(:user) }

View File

@ -68,14 +68,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
check_pipeline_response(returned: 2, all: 6)
end
context 'when performing gitaly calls', :request_store do
before do
# To prevent double writes / fallback read due to MultiStore which is failing the `Gitlab::GitalyClient
# .get_request_count` expectation.
stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
end
context 'when performing gitaly calls', :request_store, :use_null_store_as_repository_cache do
it 'limits the Gitaly requests' do
# Isolate from test preparation (Repository#exists? is also cached in RequestStore)
RequestStore.end!

View File

@ -23,7 +23,8 @@ RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
end
shared_examples 'a dismissable Broadcast Messages' do
it 'hides broadcast message after dismiss', :js do
it 'hides broadcast message after dismiss', :js,
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/390900' do
visit root_path
find('.js-dismiss-current-broadcast-notification').click

View File

@ -157,15 +157,10 @@ RSpec.describe "User creates issue", feature_category: :team_planning do
end
end
context 'form filled by URL parameters' do
context 'form filled by URL parameters', :use_null_store_as_repository_cache do
let(:project) { create(:project, :public, :repository) }
before do
# With multistore feature flags enabled (using an actual Redis store instead of NullStore),
# it somehow writes an invalid content to Redis and the specs would fail.
stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
project.repository.create_file(
user,
'.gitlab/issue_templates/bug.md',

View File

@ -122,7 +122,8 @@ RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do
project.add_developer(user)
end
it 'revoking the SSH key marks commits as unverified' do
it 'revoking the SSH key marks commits as unverified',
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/390905' do
visit project_commit_path(project, commit)
find('a.gpg-status-box', text: 'Verified').click

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Profile > Notifications > List users saved replies', :js,
feature_category: :user_profile do
let_it_be(:user) { create(:user) }
let_it_be(:saved_reply) { create(:saved_reply, user: user) }
before do
sign_in(user)
end
it 'shows the user a list of their saved replies' do
visit profile_saved_replies_path
expect(page).to have_content('My saved replies (1)')
expect(page).to have_content(saved_reply.name)
expect(page).to have_content(saved_reply.content)
end
end

View File

@ -333,6 +333,41 @@ RSpec.describe 'Task Lists', :js, feature_category: :team_planning do
expect(page).to have_selector('ul.task-list', count: 1)
expect(page).to have_selector('li.task-list-item', count: 1)
expect(page).to have_selector('ul input[checked]', count: 1)
expect(page).to have_content('1 of 1 checklist item completed')
end
end
describe 'tasks in code blocks' do
let(:code_tasks_markdown) do
<<-EOT.strip_heredoc
```
- [ ] a
```
- [ ] b
EOT
end
let!(:issue) { create(:issue, description: code_tasks_markdown, author: user, project: project) }
it 'renders' do
visit_issue(project, issue)
wait_for_requests
expect(page).to have_selector('ul.task-list', count: 1)
expect(page).to have_selector('li.task-list-item', count: 1)
expect(page).to have_selector('ul input[checked]', count: 0)
find('.task-list-item-checkbox').click
wait_for_requests
visit_issue(project, issue)
wait_for_requests
expect(page).to have_selector('ul.task-list', count: 1)
expect(page).to have_selector('li.task-list-item', count: 1)
expect(page).to have_selector('ul input[checked]', count: 1)
expect(page).to have_content('1 of 1 checklist item completed')
end
end
@ -370,6 +405,43 @@ RSpec.describe 'Task Lists', :js, feature_category: :team_planning do
end
end
describe 'summary properly formatted' do
let(:summary_markdown) do
<<-EOT.strip_heredoc
<details open>
<summary>Valid detail/summary with tasklist</summary>
- [ ] People Ops: do such and such
</details>
* [x] Task 1
EOT
end
let!(:issue) { create(:issue, description: summary_markdown, author: user, project: project) }
it 'renders' do
visit_issue(project, issue)
wait_for_requests
expect(page).to have_selector('ul.task-list', count: 2)
expect(page).to have_selector('li.task-list-item', count: 2)
expect(page).to have_selector('ul input[checked]', count: 1)
first('.task-list-item-checkbox').click
wait_for_requests
visit_issue(project, issue)
wait_for_requests
expect(page).to have_selector('ul.task-list', count: 2)
expect(page).to have_selector('li.task-list-item', count: 2)
expect(page).to have_selector('ul input[checked]', count: 2)
expect(page).to have_content('2 of 2 checklist items completed')
end
end
describe 'markdown starting with new line character' do
let(:markdown_starting_with_new_line) do
<<-EOT.strip_heredoc

View File

@ -21,7 +21,10 @@
},
"web_url": {
"type": "string"
},
"engine_name": {
"type": "string"
}
},
"additionalProperties": false
}
}

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GraphQL::Query, type: :request, feature_category: :user_profile do
include JavaScriptFixturesHelpers
include ApiHelpers
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
before do
sign_in(current_user)
end
context 'when user has no saved replies' do
base_input_path = 'saved_replies/queries/'
base_output_path = 'graphql/saved_replies/'
query_name = 'saved_replies.query.graphql'
it "#{base_output_path}saved_replies_empty.query.graphql.json" do
query = get_graphql_query_as_string("#{base_input_path}#{query_name}")
post_graphql(query, current_user: current_user)
expect_graphql_errors_to_be_empty
end
end
context 'when user has saved replies' do
base_input_path = 'saved_replies/queries/'
base_output_path = 'graphql/saved_replies/'
query_name = 'saved_replies.query.graphql'
it "#{base_output_path}saved_replies.query.graphql.json" do
create(:saved_reply, user: current_user)
create(:saved_reply, user: current_user)
query = get_graphql_query_as_string("#{base_input_path}#{query_name}")
post_graphql(query, current_user: current_user)
expect_graphql_errors_to_be_empty
end
end
end

View File

@ -0,0 +1,21 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Saved replies list item component renders list item 1`] = `
<li
class="gl-mb-5"
>
<div
class="gl-display-flex gl-align-items-center"
>
<strong>
test
</strong>
</div>
<div
class="gl-mt-3 gl-font-monospace"
>
/assign_reviewer
</div>
</li>
`;

View File

@ -0,0 +1,22 @@
import { shallowMount } from '@vue/test-utils';
import ListItem from '~/saved_replies/components/list_item.vue';
let wrapper;
function createComponent(propsData = {}) {
return shallowMount(ListItem, {
propsData,
});
}
describe('Saved replies list item component', () => {
afterEach(() => {
wrapper.destroy();
});
it('renders list item', async () => {
wrapper = createComponent({ reply: { name: 'test', content: '/assign_reviewer' } });
expect(wrapper.element).toMatchSnapshot();
});
});

View File

@ -0,0 +1,68 @@
import Vue from 'vue';
import { mount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import noSavedRepliesResponse from 'test_fixtures/graphql/saved_replies/saved_replies_empty.query.graphql.json';
import savedRepliesResponse from 'test_fixtures/graphql/saved_replies/saved_replies.query.graphql.json';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import List from '~/saved_replies/components/list.vue';
import ListItem from '~/saved_replies/components/list_item.vue';
import savedRepliesQuery from '~/saved_replies/queries/saved_replies.query.graphql';
let wrapper;
function createMockApolloProvider(response) {
Vue.use(VueApollo);
const requestHandlers = [[savedRepliesQuery, jest.fn().mockResolvedValue(response)]];
return createMockApollo(requestHandlers);
}
function createComponent(options = {}) {
const { mockApollo } = options;
return mount(List, {
apolloProvider: mockApollo,
});
}
describe('Saved replies list component', () => {
afterEach(() => {
wrapper.destroy();
});
it('does not render any list items when response is empty', async () => {
const mockApollo = createMockApolloProvider(noSavedRepliesResponse);
wrapper = createComponent({ mockApollo });
await waitForPromises();
expect(wrapper.findAllComponents(ListItem).length).toBe(0);
});
it('renders saved replies count', async () => {
const mockApollo = createMockApolloProvider(savedRepliesResponse);
wrapper = createComponent({ mockApollo });
await waitForPromises();
expect(wrapper.find('[data-testid="title"]').text()).toEqual('My saved replies (2)');
});
it('renders list of saved replies', async () => {
const mockApollo = createMockApolloProvider(savedRepliesResponse);
const savedReplies = savedRepliesResponse.data.currentUser.savedReplies.nodes;
wrapper = createComponent({ mockApollo });
await waitForPromises();
expect(wrapper.findAllComponents(ListItem).length).toBe(2);
expect(wrapper.findAllComponents(ListItem).at(0).props('reply')).toEqual(
expect.objectContaining(savedReplies[0]),
);
expect(wrapper.findAllComponents(ListItem).at(1).props('reply')).toEqual(
expect.objectContaining(savedReplies[1]),
);
});
});

View File

@ -130,4 +130,11 @@ describe('Work Item Discussion', () => {
expect(findToggleRepliesWidget().props('collapsed')).toBe(false);
});
});
it('emits `deleteNote` event with correct parameter when child note component emits `deleteNote` event', () => {
createComponent();
findThreadAtIndex(0).vm.$emit('deleteNote');
expect(wrapper.emitted('deleteNote')).toEqual([[mockWorkItemCommentNote]]);
});
});

View File

@ -1,4 +1,4 @@
import { GlAvatarLink } from '@gitlab/ui';
import { GlAvatarLink, GlDropdown } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import WorkItemNote from '~/work_items/components/notes/work_item_note.vue';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
@ -15,6 +15,8 @@ describe('Work Item Note', () => {
const findNoteHeader = () => wrapper.findComponent(NoteHeader);
const findNoteBody = () => wrapper.findComponent(NoteBody);
const findNoteActions = () => wrapper.findComponent(NoteActions);
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDeleteNoteButton = () => wrapper.find('[data-testid="delete-note-action"]');
const createComponent = ({ note = mockWorkItemCommentNote, isFirstNote = false } = {}) => {
wrapper = shallowMount(WorkItemNote, {
@ -66,4 +68,34 @@ describe('Work Item Note', () => {
expect(findNoteActions().props('showReply')).toBe(false);
});
});
it('should display a dropdown if user has a permission to delete note', () => {
createComponent({
note: {
...mockWorkItemCommentNote,
userPermissions: { ...mockWorkItemCommentNote.userPermissions, adminNote: true },
},
});
expect(findDropdown().exists()).toBe(true);
});
it('should not display a dropdown if user has no permission to delete note', () => {
createComponent();
expect(findDropdown().exists()).toBe(false);
});
it('should emit `deleteNote` event when delete note action is clicked', () => {
createComponent({
note: {
...mockWorkItemCommentNote,
userPermissions: { ...mockWorkItemCommentNote.userPermissions, adminNote: true },
},
});
findDeleteNoteButton().vm.$emit('click');
expect(wrapper.emitted('deleteNote')).toEqual([[]]);
});
});

View File

@ -10,7 +10,7 @@ import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
import WorkItemCommentForm from '~/work_items/components/work_item_comment_form.vue';
import WorkItemCommentLocked from '~/work_items/components/work_item_comment_locked.vue';
import createNoteMutation from '~/work_items/graphql/create_work_item_note.mutation.graphql';
import createNoteMutation from '~/work_items/graphql/notes/create_work_item_note.mutation.graphql';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';

View File

@ -1,16 +1,18 @@
import { GlSkeletonLoader } from '@gitlab/ui';
import { GlSkeletonLoader, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import SystemNote from '~/work_items/components/notes/system_note.vue';
import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
import WorkItemDiscussion from '~/work_items/components/notes/work_item_discussion.vue';
import WorkItemCommentForm from '~/work_items/components/work_item_comment_form.vue';
import ActivityFilter from '~/work_items/components/notes/activity_filter.vue';
import workItemNotesQuery from '~/work_items/graphql/work_item_notes.query.graphql';
import workItemNotesByIidQuery from '~/work_items/graphql/work_item_notes_by_iid.query.graphql';
import workItemNotesQuery from '~/work_items/graphql/notes/work_item_notes.query.graphql';
import workItemNotesByIidQuery from '~/work_items/graphql/notes/work_item_notes_by_iid.query.graphql';
import deleteWorkItemNoteMutation from '~/work_items/graphql/notes/delete_work_item_notes.mutation.graphql';
import { DEFAULT_PAGE_SIZE_NOTES, WIDGET_TYPE_NOTES } from '~/work_items/constants';
import { ASC, DESC } from '~/notes/constants';
import {
@ -47,6 +49,8 @@ describe('WorkItemNotes component', () => {
Vue.use(VueApollo);
const showModal = jest.fn();
const findAllSystemNotes = () => wrapper.findAllComponents(SystemNote);
const findAllListItems = () => wrapper.findAll('ul.timeline > *');
const findActivityLabel = () => wrapper.find('label');
@ -56,6 +60,8 @@ describe('WorkItemNotes component', () => {
const findSystemNoteAtIndex = (index) => findAllSystemNotes().at(index);
const findAllWorkItemCommentNotes = () => wrapper.findAllComponents(WorkItemDiscussion);
const findWorkItemCommentNoteAtIndex = (index) => findAllWorkItemCommentNotes().at(index);
const findDeleteNoteModal = () => wrapper.findComponent(GlModal);
const workItemNotesQueryHandler = jest.fn().mockResolvedValue(mockWorkItemNotesResponse);
const workItemNotesByIidQueryHandler = jest
.fn()
@ -64,16 +70,22 @@ describe('WorkItemNotes component', () => {
const workItemNotesWithCommentsQueryHandler = jest
.fn()
.mockResolvedValue(mockWorkItemNotesResponseWithComments);
const deleteWorkItemNoteMutationSuccessHandler = jest.fn().mockResolvedValue({
data: { destroyNote: { note: null, __typename: 'DestroyNote' } },
});
const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
const createComponent = ({
workItemId = mockWorkItemId,
fetchByIid = false,
defaultWorkItemNotesQueryHandler = workItemNotesQueryHandler,
deleteWINoteMutationHandler = deleteWorkItemNoteMutationSuccessHandler,
} = {}) => {
wrapper = shallowMount(WorkItemNotes, {
apolloProvider: createMockApollo([
[workItemNotesQuery, defaultWorkItemNotesQueryHandler],
[workItemNotesByIidQuery, workItemNotesByIidQueryHandler],
[deleteWorkItemNoteMutation, deleteWINoteMutationHandler],
]),
propsData: {
workItemId,
@ -89,6 +101,9 @@ describe('WorkItemNotes component', () => {
useIidInWorkItemsPath: fetchByIid,
},
},
stubs: {
GlModal: stubComponent(GlModal, { methods: { show: showModal } }),
},
});
};
@ -240,4 +255,83 @@ describe('WorkItemNotes component', () => {
);
});
});
it('should open delete modal confirmation when child discussion emits `deleteNote` event', async () => {
createComponent({
defaultWorkItemNotesQueryHandler: workItemNotesWithCommentsQueryHandler,
});
await waitForPromises();
findWorkItemCommentNoteAtIndex(0).vm.$emit('deleteNote', { id: '1', isLastNote: false });
expect(showModal).toHaveBeenCalled();
});
describe('when modal is open', () => {
beforeEach(() => {
createComponent({
defaultWorkItemNotesQueryHandler: workItemNotesWithCommentsQueryHandler,
});
return waitForPromises();
});
it('sends the mutation with correct variables', () => {
const noteId = 'some-test-id';
findWorkItemCommentNoteAtIndex(0).vm.$emit('deleteNote', { id: noteId });
findDeleteNoteModal().vm.$emit('primary');
expect(deleteWorkItemNoteMutationSuccessHandler).toHaveBeenCalledWith({
input: {
id: noteId,
},
});
});
it('successfully removes the note from the discussion', async () => {
expect(findWorkItemCommentNoteAtIndex(0).props('discussion')).toHaveLength(2);
findWorkItemCommentNoteAtIndex(0).vm.$emit('deleteNote', {
id: mockDiscussions[0].notes.nodes[0].id,
});
findDeleteNoteModal().vm.$emit('primary');
await waitForPromises();
expect(findWorkItemCommentNoteAtIndex(0).props('discussion')).toHaveLength(1);
});
it('successfully removes the discussion from work item if discussion only had one note', async () => {
const secondDiscussion = findWorkItemCommentNoteAtIndex(1);
expect(findAllWorkItemCommentNotes()).toHaveLength(2);
expect(secondDiscussion.props('discussion')).toHaveLength(1);
secondDiscussion.vm.$emit('deleteNote', {
id: mockDiscussions[1].notes.nodes[0].id,
discussion: { id: mockDiscussions[1].id },
});
findDeleteNoteModal().vm.$emit('primary');
await waitForPromises();
expect(findAllWorkItemCommentNotes()).toHaveLength(1);
});
});
it('emits `error` event if delete note mutation is rejected', async () => {
createComponent({
defaultWorkItemNotesQueryHandler: workItemNotesWithCommentsQueryHandler,
deleteWINoteMutationHandler: errorHandler,
});
await waitForPromises();
findWorkItemCommentNoteAtIndex(0).vm.$emit('deleteNote', {
id: mockDiscussions[0].notes.nodes[0].id,
});
findDeleteNoteModal().vm.$emit('primary');
await waitForPromises();
expect(wrapper.emitted('error')).toEqual([
['Something went wrong when deleting a comment. Please try again'],
]);
});
});

View File

@@ -66,7 +66,7 @@ RSpec.describe Mutations::Ci::JobTokenScope::RemoveProject, feature_category: :c
it 'executes project removal for the correct direction' do
expect(::Ci::JobTokenScope::RemoveProjectService)
.to receive(:new).with(project, current_user).and_return(service)
expect(service).to receive(:execute).with(target_project, direction: 'inbound')
expect(service).to receive(:execute).with(target_project, 'inbound')
.and_return(instance_double('ServiceResponse', "success?": true))
subject
@@ -78,7 +78,7 @@ RSpec.describe Mutations::Ci::JobTokenScope::RemoveProject, feature_category: :c
it 'returns an error response' do
expect(::Ci::JobTokenScope::RemoveProjectService).to receive(:new).with(project, current_user).and_return(service)
expect(service).to receive(:execute).with(target_project, direction: :outbound).and_return(ServiceResponse.error(message: 'The error message'))
expect(service).to receive(:execute).with(target_project, :outbound).and_return(ServiceResponse.error(message: 'The error message'))
expect(subject.fetch(:ci_job_token_scope)).to be_nil
expect(subject.fetch(:errors)).to include("The error message")
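(For context: the updated expectations above imply that Ci::JobTokenScope::RemoveProjectService#execute now takes the scope direction as a positional argument instead of the previous `direction:` keyword. A minimal caller sketch under that assumption, reusing the variable names from the spec; this is illustrative only, not a confirmed public API beyond what the diff shows.)

# Hypothetical usage sketch mirroring the spec expectations above.
service = ::Ci::JobTokenScope::RemoveProjectService.new(project, current_user)

# The direction is now passed positionally ('inbound' / :outbound) rather than as `direction:`.
response = service.execute(target_project, :outbound)
response.success? # => true/false on the ServiceResponse-style result stubbed in the spec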

View File

@@ -0,0 +1,45 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::SchemaValidation::Database, feature_category: :database do
let(:database_name) { 'main' }
let(:database_indexes) do
[['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))']]
end
let(:query_result) { instance_double('ActiveRecord::Result', rows: database_indexes) }
let(:database_model) { Gitlab::Database.database_base_models[database_name] }
let(:connection) { database_model.connection }
subject(:database) { described_class.new(connection) }
before do
allow(connection).to receive(:exec_query).and_return(query_result)
end
describe '#fetch_index_by_name' do
context 'when index does not exist' do
it 'returns nil' do
index = database.fetch_index_by_name('non_existing_index')
expect(index).to be_nil
end
end
it 'returns index by name' do
index = database.fetch_index_by_name('index')
expect(index.name).to eq('index')
end
end
describe '#indexes' do
it 'returns indexes' do
indexes = database.indexes
expect(indexes).to all(be_a(Gitlab::Database::SchemaValidation::Index))
expect(indexes.map(&:name)).to eq(['index'])
end
end
end

View File

@@ -0,0 +1,22 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::SchemaValidation::Index, feature_category: :database do
let(:index_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' }
let(:stmt) { PgQuery.parse(index_statement).tree.stmts.first.stmt.index_stmt }
let(:index) { described_class.new(stmt) }
describe '#name' do
it 'returns index name' do
expect(index.name).to eq('index_name')
end
end
describe '#statement' do
it 'returns index statement' do
expect(index.statement).to eq(index_statement)
end
end
end

View File

@@ -6,8 +6,8 @@ RSpec.describe Gitlab::Database::SchemaValidation::Indexes, feature_category: :d
let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') }
let(:database_indexes) do
[
['wrong_index', 'CREATE UNIQUE INDEX public.wrong_index ON table_name (column_name)'],
['extra_index', 'CREATE INDEX public.extra_index ON table_name (column_name)'],
['wrong_index', 'CREATE UNIQUE INDEX wrong_index ON public.table_name (column_name)'],
['extra_index', 'CREATE INDEX extra_index ON public.table_name (column_name)'],
['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))']
]
end
@@ -20,7 +20,10 @@ RSpec.describe Gitlab::Database::SchemaValidation::Indexes, feature_category: :d
let(:query_result) { instance_double('ActiveRecord::Result', rows: database_indexes) }
subject(:schema_validation) { described_class.new(structure_file_path, database_name) }
let(:database) { Gitlab::Database::SchemaValidation::Database.new(connection) }
let(:structure_file) { Gitlab::Database::SchemaValidation::StructureSql.new(structure_file_path) }
subject(:schema_validation) { described_class.new(structure_file, database) }
before do
allow(connection).to receive(:exec_query).and_return(query_result)

View File

@@ -124,8 +124,8 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state
method: 'GET',
path: enabled_path,
status: status_code,
request_urgency: :low,
target_duration_s: 5,
request_urgency: :medium,
target_duration_s: 0.5,
metadata: a_hash_including(
{
'meta.caller_id' => 'Projects::NotesController#index',

View File

@@ -5,7 +5,7 @@ require 'rspec-parameterized'
require 'support/helpers/rails_helpers'
RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cache, :clean_gitlab_redis_cache,
feature_category: :scalability do
:use_null_store_as_repository_cache, feature_category: :scalability do
using RSpec::Parameterized::TableSyntax
describe '.add_instrumentation_data', :request_store do
@@ -23,44 +23,21 @@ RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cac
expect(payload).to include(db_count: 0, db_cached_count: 0, db_write_count: 0)
end
shared_examples 'make Gitaly calls' do
context 'when Gitaly calls are made' do
it 'adds Gitaly and Redis data' do
project = create(:project)
RequestStore.clear!
project.repository.exists?
context 'when Gitaly calls are made' do
it 'adds Gitaly and Redis data' do
project = create(:project)
RequestStore.clear!
project.repository.exists?
subject
subject
expect(payload[:gitaly_calls]).to eq(1)
expect(payload[:gitaly_duration_s]).to be >= 0
# With MultiStore, the number of `redis_calls` depends on whether primary_store
# (Gitlab::Redis::RepositoryCache) and secondary_store (Gitlab::Redis::Cache) are of the same instance.
# In GitLab.com CI, primary and secondary are the same instance, so only 1 call is made. If primary
# and secondary are different instances, an additional fallback read to secondary_store will be made because
# the first `get` call is a cache miss. Then, the following expect will fail.
expect(payload[:redis_calls]).to eq(1)
expect(payload[:redis_duration_ms]).to be_nil
end
expect(payload[:gitaly_calls]).to eq(1)
expect(payload[:gitaly_duration_s]).to be >= 0
expect(payload[:redis_calls]).to eq(nil)
expect(payload[:redis_duration_ms]).to be_nil
end
end
context 'when multistore ff use_primary_and_secondary_stores_for_repository_cache is enabled' do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: true)
end
it_behaves_like 'make Gitaly calls'
end
context 'when multistore ff use_primary_and_secondary_stores_for_repository_cache is disabled' do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
end
it_behaves_like 'make Gitaly calls'
end
context 'when Redis calls are made' do
it 'adds Redis data and omits Gitaly data' do
stub_rails_env('staging') # to avoid raising CrossSlotError

View File

@@ -4,43 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::RepositoryCache, feature_category: :scalability do
include_examples "redis_new_instance_shared_examples", 'repository_cache', Gitlab::Redis::Cache
include_examples "redis_shared_examples"
describe '#pool' do
let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
subject { described_class.pool }
before do
allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
# Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
allow(Gitlab::Redis::Cache).to receive(:rails_root).and_return(mktmpdir)
allow(Gitlab::Redis::Cache).to receive(:config_file_name).and_return(config_new_format_socket)
end
around do |example|
clear_pool
example.run
ensure
clear_pool
end
it 'instantiates an instance of MultiStore' do
subject.with do |redis_instance|
expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
expect(redis_instance.instance_name).to eq('RepositoryCache')
end
end
it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_repository_cache,
:use_primary_store_as_default_for_repository_cache
end
describe '#raw_config_hash' do
it 'has a legacy default URL' do
@@ -49,4 +12,10 @@ RSpec.describe Gitlab::Redis::RepositoryCache, feature_category: :scalability do
expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6380')
end
end
describe '.cache_store' do
it 'has a default ttl of 8 hours' do
expect(described_class.cache_store.options[:expires_in]).to eq(8.hours)
end
end
end

View File

@@ -1089,4 +1089,73 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do
it { is_expected.not_to match('random string') }
it { is_expected.not_to match('12321342545356434523412341245452345623453542345234523453245') }
end
describe 'code, html blocks, or html comment blocks regex' do
context 'code blocks' do
subject { described_class::MARKDOWN_CODE_BLOCK_REGEX }
let(:expected) { %(```code\nsome code\n\n>>>\nthat includes a multiline-blockquote\n>>>\n```) }
let(:markdown) do
<<~MARKDOWN
Regular text
```code
some code
>>>
that includes a multiline-blockquote
>>>
```
MARKDOWN
end
it { is_expected.to match(%(```ruby\nsomething\n```)) }
it { is_expected.not_to match(%(must start in first column ```ruby\nsomething\n```)) }
it { is_expected.not_to match(%(```ruby must be multi-line ```)) }
it { expect(subject.match(markdown)[:code]).to eq expected }
end
context 'HTML blocks' do
subject { described_class::MARKDOWN_HTML_BLOCK_REGEX }
let(:expected) { %(<section>\n<p>paragraph</p>\n\n>>>\nthat includes a multiline-blockquote\n>>>\n</section>) }
let(:markdown) do
<<~MARKDOWN
Regular text
<section>
<p>paragraph</p>
>>>
that includes a multiline-blockquote
>>>
</section>
MARKDOWN
end
it { is_expected.to match(%(<section>\nsomething\n</section>)) }
it { is_expected.not_to match(%(must start in first column <section>\nsomething\n</section>)) }
it { is_expected.not_to match(%(<section>must be multi-line</section>)) }
it { expect(subject.match(markdown)[:html]).to eq expected }
end
context 'HTML comment blocks' do
subject { described_class::MARKDOWN_HTML_COMMENT_BLOCK_REGEX }
let(:expected) { %(<!-- the start of an HTML comment\n- [ ] list item commented out\n-->) }
let(:markdown) do
<<~MARKDOWN
Regular text
<!-- the start of an HTML comment
- [ ] list item commented out
-->
MARKDOWN
end
it { is_expected.to match(%(<!--\ncomment\n-->)) }
it { is_expected.not_to match(%(must start in first column <!--\ncomment\n-->)) }
it { expect(subject.match(markdown)[:html_block_comment]).to eq expected }
end
end
end

View File

@@ -6,76 +6,51 @@ RSpec.describe Gitlab::RepositoryCache::Preloader, :use_clean_rails_redis_cachin
feature_category: :source_code_management do
let(:projects) { create_list(:project, 2, :repository) }
let(:repositories) { projects.map(&:repository) }
let(:cache) { Gitlab::RepositoryCache.store }
before do
stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
end
describe '#preload' do
context 'when the values are already cached' do
before do
# Warm the cache but use a different model so they are not memoized
repos = Project.id_in(projects).order(:id).map(&:repository)
shared_examples 'preload' do
describe '#preload' do
context 'when the values are already cached' do
before do
# Warm the cache but use a different model so they are not memoized
repos = Project.id_in(projects).order(:id).map(&:repository)
allow(repos[0].head_tree).to receive(:readme_path).and_return('README.txt')
allow(repos[1].head_tree).to receive(:readme_path).and_return('README.md')
allow(repos[0].head_tree).to receive(:readme_path).and_return('README.txt')
allow(repos[1].head_tree).to receive(:readme_path).and_return('README.md')
repos.map(&:exists?)
repos.map(&:readme_path)
end
it 'prevents individual cache reads for cached methods' do
expect(cache).to receive(:read_multi).once.and_call_original
described_class.new(repositories).preload(
%i[exists? readme_path]
)
expect(cache).not_to receive(:read)
expect(cache).not_to receive(:write)
expect(repositories[0].exists?).to eq(true)
expect(repositories[0].readme_path).to eq('README.txt')
expect(repositories[1].exists?).to eq(true)
expect(repositories[1].readme_path).to eq('README.md')
end
repos.map(&:exists?)
repos.map(&:readme_path)
end
context 'when values are not cached' do
it 'reads and writes from cache individually' do
described_class.new(repositories).preload(
%i[exists? has_visible_content?]
)
it 'prevents individual cache reads for cached methods' do
expect(cache).to receive(:read_multi).once.and_call_original
expect(cache).to receive(:read).exactly(4).times
expect(cache).to receive(:write).exactly(4).times
described_class.new(repositories).preload(
%i[exists? readme_path]
)
repositories.each(&:exists?)
repositories.each(&:has_visible_content?)
end
expect(cache).not_to receive(:read)
expect(cache).not_to receive(:write)
expect(repositories[0].exists?).to eq(true)
expect(repositories[0].readme_path).to eq('README.txt')
expect(repositories[1].exists?).to eq(true)
expect(repositories[1].readme_path).to eq('README.md')
end
end
end
context 'when use_primary_and_secondary_stores_for_repository_cache feature flag is enabled' do
let(:cache) { Gitlab::RepositoryCache.store }
context 'when values are not cached' do
it 'reads and writes from cache individually' do
described_class.new(repositories).preload(
%i[exists? has_visible_content?]
)
before do
stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: true)
expect(cache).to receive(:read).exactly(4).times
expect(cache).to receive(:write).exactly(4).times
repositories.each(&:exists?)
repositories.each(&:has_visible_content?)
end
end
it_behaves_like 'preload'
end
context 'when use_primary_and_secondary_stores_for_repository_cache feature flag is disabled' do
let(:cache) { Rails.cache }
before do
stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
end
it_behaves_like 'preload'
end
end

View File

@@ -69,35 +69,20 @@ RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
end
end
shared_examples "key?" do
describe "#key?" do
subject { cache.key?(:example, "test") }
describe "#key?" do
subject { cache.key?(:example, "test") }
context "key exists" do
before do
cache.write(:example, test_hash)
end
it { is_expected.to be(true) }
context "key exists" do
before do
cache.write(:example, test_hash)
end
context "key doesn't exist" do
it { is_expected.to be(false) }
end
end
end
context "when both multistore FF is enabled" do
it_behaves_like "key?"
end
context "when both multistore FF is disabled" do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
it { is_expected.to be(true) }
end
it_behaves_like "key?"
context "key doesn't exist" do
it { is_expected.to be(false) }
end
end
describe "#read_members" do

View File

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Taskable do
RSpec.describe Taskable, feature_category: :team_planning do
using RSpec::Parameterized::TableSyntax
describe '.get_tasks' do
@@ -13,8 +13,18 @@ RSpec.describe Taskable do
- [x] Second item
* [x] First item
* [ ] Second item
<!-- a comment
- [ ] Item in comment, ignore
rest of comment -->
+ [ ] No-break space (U+00A0)
+ [] Figure space (U+2007)
```
- [ ] Item in code, ignore
```
+ [] Narrow no-break space (U+202F)
+ [] Thin space (U+2009)
MARKDOWN

View File

@@ -76,6 +76,15 @@ RSpec.describe 'CiJobTokenScopeRemoveProject', feature_category: :continuous_int
end.to change { Ci::JobToken::ProjectScopeLink.outbound.count }.by(-1)
end
it 'responds successfully' do
post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:ok)
expect(graphql_errors).to be_nil
expect(graphql_data_at(:ciJobTokenScopeRemoveProject, :ciJobTokenScope, :projects, :nodes))
.to contain_exactly({ 'path' => project.path })
end
context 'when invalid target project is provided' do
before do
variables[:target_project_path] = 'unknown/project'

View File

@@ -0,0 +1,35 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Profiles::SavedRepliesController, feature_category: :user_profile do
let_it_be(:user) { create(:user) }
before do
sign_in(user)
end
describe 'GET #index' do
describe 'feature flag disabled' do
before do
stub_feature_flags(saved_replies: false)
get '/-/profile/saved_replies'
end
it { expect(response).to have_gitlab_http_status(:not_found) }
end
describe 'feature flag enabled' do
before do
get '/-/profile/saved_replies'
end
it { expect(response).to have_gitlab_http_status(:ok) }
it 'sets hide search settings ivar' do
expect(assigns(:hide_search_settings)).to eq(true)
end
end
end
end

View File

@@ -43,7 +43,7 @@ RSpec.describe 'Project noteable notes', feature_category: :team_planning do
expect(Gitlab::Metrics::RailsSlis.request_apdex).to(
receive(:increment).with(
labels: {
request_urgency: :low,
request_urgency: :medium,
feature_category: "team_planning",
endpoint_id: "Projects::NotesController#index"
},
@@ -57,8 +57,8 @@ RSpec.describe 'Project noteable notes', feature_category: :team_planning do
'process_action.action_controller',
a_hash_including(
{
request_urgency: :low,
target_duration_s: 5,
request_urgency: :medium,
target_duration_s: 0.5,
metadata: a_hash_including({
'meta.feature_category' => 'team_planning',
'meta.caller_id' => "Projects::NotesController#index"

View File

@@ -18,6 +18,7 @@ RSpec.describe CodequalityDegradationEntity do
expect(subject[:file_path]).to eq("file_a.rb")
expect(subject[:line]).to eq(10)
expect(subject[:web_url]).to eq("http://localhost/root/test-project/-/blob/f572d396fae9206628714fb2ce00f72e94f2258f/file_a.rb#L10")
expect(subject[:engine_name]).to eq('structure')
end
end
@@ -30,6 +31,7 @@ RSpec.describe CodequalityDegradationEntity do
expect(subject[:file_path]).to eq("file_b.rb")
expect(subject[:line]).to eq(10)
expect(subject[:web_url]).to eq("http://localhost/root/test-project/-/blob/f572d396fae9206628714fb2ce00f72e94f2258f/file_b.rb#L10")
expect(subject[:engine_name]).to eq('rubocop')
end
end
@@ -46,6 +48,7 @@ RSpec.describe CodequalityDegradationEntity do
expect(subject[:file_path]).to eq("file_b.rb")
expect(subject[:line]).to eq(10)
expect(subject[:web_url]).to eq("http://localhost/root/test-project/-/blob/f572d396fae9206628714fb2ce00f72e94f2258f/file_b.rb#L10")
expect(subject[:engine_name]).to eq('rubocop')
end
end
end

View File

@@ -23,7 +23,7 @@ RSpec.describe Ci::JobTokenScope::RemoveProjectService, feature_category: :conti
end
describe '#execute' do
subject(:result) { service.execute(target_project) }
subject(:result) { service.execute(target_project, :outbound) }
it_behaves_like 'editable job token scope' do
context 'when user has permissions on source and target project' do

View File

@@ -2,11 +2,12 @@
require 'spec_helper'
RSpec.describe Projects::ImportExport::ExportService do
RSpec.describe Projects::ImportExport::ExportService, feature_category: :importers do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:project) { create(:project, group: group) }
let(:project) { create(:project) }
let(:shared) { project.import_export_shared }
let!(:after_export_strategy) { Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy.new }
@@ -220,5 +221,21 @@ RSpec.describe Projects::ImportExport::ExportService do
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
end
end
it "avoids N+1 when exporting project members" do
group.add_owner(user)
group.add_maintainer(create(:user))
project.add_maintainer(create(:user))
# warm up
service.execute
control = ActiveRecord::QueryRecorder.new { service.execute }
group.add_maintainer(create(:user))
project.add_maintainer(create(:user))
expect { service.execute }.not_to exceed_query_limit(control)
end
end
end

View File

@@ -25,4 +25,10 @@ RSpec.configure do |config|
instance_class.with(&:flushdb)
end
end
config.before(:each, :use_null_store_as_repository_cache) do |example|
null_store = ActiveSupport::Cache::NullStore.new
allow(Gitlab::Redis::RepositoryCache).to receive(:cache_store).and_return(null_store)
end
end
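(A brief usage sketch of the new `:use_null_store_as_repository_cache` metadata hook added above: tagging a spec with it makes `Gitlab::Redis::RepositoryCache.cache_store` return an `ActiveSupport::Cache::NullStore`, so repository-cache reads and writes become no-ops. The describe block below is illustrative only; the InstrumentationHelper spec in this commit opts in the same way.)

# Illustrative spec; the metadata tag activates the before(:each) hook defined above.
RSpec.describe 'a spec that should bypass the repository cache', :use_null_store_as_repository_cache do
  it 'uses a null store for the repository cache' do
    expect(Gitlab::Redis::RepositoryCache.cache_store).to be_an(ActiveSupport::Cache::NullStore)
  end
end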

Some files were not shown because too many files have changed in this diff.