Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-04-26 15:10:20 +00:00
parent e5e0589e09
commit aad3ac9e5e
105 changed files with 2371 additions and 706 deletions

View File

@ -680,25 +680,25 @@ rspec migration pg12:
extends:
- .rspec-base-pg12
- .rspec-base-migration
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage
- .rspec-migration-parallel
rspec unit pg12:
extends:
- .rspec-base-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage
- .rspec-unit-parallel
rspec integration pg12:
extends:
- .rspec-base-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage
- .rspec-integration-parallel
rspec system pg12:
extends:
- .rspec-base-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage
- .rspec-system-parallel
# EE/FOSS: default branch nightly scheduled jobs #
##########################################
@ -709,42 +709,42 @@ rspec-ee migration pg12:
extends:
- .rspec-ee-base-pg12
- .rspec-base-migration
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage-ee-only
- .rspec-ee-migration-parallel
rspec-ee unit pg12:
extends:
- .rspec-ee-base-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage-ee-only
- .rspec-ee-unit-parallel
rspec-ee integration pg12:
extends:
- .rspec-ee-base-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage-ee-only
- .rspec-ee-integration-parallel
rspec-ee system pg12:
extends:
- .rspec-ee-base-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage-ee-only
- .rspec-ee-system-parallel
rspec-ee unit pg12 geo:
extends:
- .rspec-ee-base-geo-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage-ee-only
- .rspec-ee-unit-geo-parallel
rspec-ee integration pg12 geo:
extends:
- .rspec-ee-base-geo-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage-ee-only
rspec-ee system pg12 geo:
extends:
- .rspec-ee-base-geo-pg12
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage-ee-only
# EE: default branch nightly scheduled jobs #
#####################################

View File

@ -915,16 +915,18 @@
allow_failure: true
- <<: *if-merge-request-title-run-all-rspec
.rails:rules:default-branch-schedule-nightly--code-backstage:
.rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage:
rules:
- <<: *if-default-branch-schedule-2-hourly
- <<: *if-default-branch-schedule-nightly
- <<: *if-merge-request
changes: [".gitlab/ci/rails.gitlab-ci.yml"]
.rails:rules:default-branch-schedule-nightly--code-backstage-ee-only:
.rails:rules:default-branch-schedule-2-hourly-nightly--code-backstage-ee-only:
rules:
- <<: *if-not-ee
when: never
- <<: *if-default-branch-schedule-2-hourly
- <<: *if-default-branch-schedule-nightly
- <<: *if-merge-request
changes: [".gitlab/ci/rails.gitlab-ci.yml"]

View File

@ -126,12 +126,6 @@ Rails/SaveBang:
- 'ee/spec/services/status_page/trigger_publish_service_spec.rb'
- 'ee/spec/services/todo_service_spec.rb'
- 'ee/spec/services/vulnerability_feedback/create_service_spec.rb'
- 'ee/spec/support/protected_tags/access_control_shared_examples.rb'
- 'ee/spec/support/shared_examples/features/protected_branches_access_control_shared_examples.rb'
- 'ee/spec/support/shared_examples/finders/geo/framework_registry_finder_shared_examples.rb'
- 'ee/spec/support/shared_examples/graphql/geo/geo_registries_resolver_shared_examples.rb'
- 'ee/spec/support/shared_examples/lib/analytics/common_merge_request_metrics_refresh_shared_examples.rb'
- 'ee/spec/support/shared_examples/policies/protected_environments_shared_examples.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_over_http_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_lfs_over_http_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/pull_mirroring_over_http_spec.rb'
@ -392,15 +386,7 @@ Rails/TimeZone:
# WIP: https://gitlab.com/gitlab-org/gitlab/-/issues/325836
RSpec/EmptyLineAfterFinalLetItBe:
Exclude:
- ee/spec/controllers/groups/analytics/cycle_analytics/stages_controller_spec.rb
- ee/spec/controllers/groups/analytics/cycle_analytics/summary_controller_spec.rb
- ee/spec/controllers/groups/analytics/cycle_analytics/value_streams_controller_spec.rb
- ee/spec/controllers/groups/analytics/tasks_by_type_controller_spec.rb
- ee/spec/controllers/groups/autocomplete_sources_controller_spec.rb
- ee/spec/controllers/groups/insights_controller_spec.rb
- ee/spec/controllers/groups/todos_controller_spec.rb
- ee/spec/controllers/subscriptions_controller_spec.rb
- ee/spec/features/boards/group_boards/multiple_boards_spec.rb
- ee/spec/features/ci_shared_runner_warnings_spec.rb
- ee/spec/features/groups/groups_security_credentials_spec.rb
- ee/spec/features/groups/hooks/user_edits_hooks_spec.rb

View File

@ -1 +1 @@
7b3856441543419869962d34439efbda2fc00024
585cfd46d6be12237b640d19cbd730f3065e0ecc

View File

@ -17,10 +17,8 @@ export default {
};
</script>
<template>
<div
class="gl-display-flex gl-flex-direction-column gl-p-3 gl-border-solid gl-border-1 gl-border-gray-200 gl-rounded-base"
>
<top-toolbar class="gl-mb-3" :editor="editor" />
<editor-content class="md" :editor="editor" />
<div class="md md-area" :class="{ 'is-focused': editor.focused }">
<top-toolbar class="gl-mb-4" :editor="editor" />
<editor-content :editor="editor" />
</div>
</template>

View File

@ -18,7 +18,12 @@ import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '../constants';
import CodeBlockHighlight from '../extensions/code_block_highlight';
import createMarkdownSerializer from './markdown_serializer';
const createEditor = async ({ content, renderMarkdown, serializer: customSerializer } = {}) => {
const createEditor = async ({
content,
renderMarkdown,
serializer: customSerializer,
...options
} = {}) => {
if (!customSerializer && !isFunction(renderMarkdown)) {
throw new Error(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
}
@ -41,14 +46,10 @@ const createEditor = async ({ content, renderMarkdown, serializer: customSeriali
],
editorProps: {
attributes: {
/*
* Adds some padding to the contenteditable element where the user types.
* Otherwise, the text cursor is not visible when its position is at the
* beginning of a line.
*/
class: 'gl-py-4 gl-px-5',
class: 'gl-outline-0!',
},
},
...options,
});
const serializer = customSerializer || createMarkdownSerializer({ render: renderMarkdown });

View File

@ -1,9 +1,11 @@
<script>
import { GlForm, GlIcon, GlLink, GlButton, GlSprintf } from '@gitlab/ui';
import { GlForm, GlIcon, GlLink, GlButton, GlSprintf, GlAlert, GlLoadingIcon } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import csrf from '~/lib/utils/csrf';
import { setUrlFragment } from '~/lib/utils/url_utility';
import { __, s__, sprintf } from '~/locale';
import { s__, sprintf } from '~/locale';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
const MARKDOWN_LINK_TEXT = {
markdown: '[Link Title](page-slug)',
@ -14,20 +16,31 @@ const MARKDOWN_LINK_TEXT = {
export default {
components: {
GlAlert,
GlForm,
GlSprintf,
GlIcon,
GlLink,
GlButton,
MarkdownField,
GlLoadingIcon,
ContentEditor: () =>
import(
/* webpackChunkName: 'content_editor' */ '~/content_editor/components/content_editor.vue'
),
},
mixins: [glFeatureFlagMixin()],
inject: ['formatOptions', 'pageInfo'],
data() {
return {
title: this.pageInfo.title?.trim() || '',
format: this.pageInfo.format || 'markdown',
content: this.pageInfo.content?.trim() || '',
isContentEditorLoading: true,
useContentEditor: false,
commitMessage: '',
editor: null,
isDirty: false,
};
},
computed: {
@ -52,7 +65,7 @@ export default {
return MARKDOWN_LINK_TEXT[this.format];
},
submitButtonText() {
if (this.pageInfo.persisted) return __('Save changes');
if (this.pageInfo.persisted) return s__('WikiPage|Save changes');
return s__('WikiPage|Create page');
},
cancelFormPath() {
@ -62,20 +75,50 @@ export default {
wikiSpecificMarkdownHelpPath() {
return setUrlFragment(this.pageInfo.markdownHelpPath, 'wiki-specific-markdown');
},
isMarkdownFormat() {
return this.format === 'markdown';
},
showContentEditorButton() {
return this.isMarkdownFormat && !this.useContentEditor && this.glFeatures.wikiContentEditor;
},
isContentEditorActive() {
return this.isMarkdownFormat && this.useContentEditor;
},
},
mounted() {
this.updateCommitMessage();
window.addEventListener('beforeunload', this.onPageUnload);
},
destroyed() {
window.removeEventListener('beforeunload', this.onPageUnload);
},
methods: {
getContentHTML(content) {
return axios
.post(this.pageInfo.markdownPreviewPath, { text: content })
.then(({ data }) => data.body);
},
handleFormSubmit() {
window.removeEventListener('beforeunload', this.onBeforeUnload);
if (this.useContentEditor) {
this.content = this.editor.getSerializedContent();
}
this.isDirty = false;
},
handleContentChange() {
window.addEventListener('beforeunload', this.onBeforeUnload);
this.isDirty = true;
},
onBeforeUnload() {
onPageUnload(event) {
if (!this.isDirty) return undefined;
event.preventDefault();
// eslint-disable-next-line no-param-reassign
event.returnValue = '';
return '';
},
@ -88,6 +131,28 @@ export default {
const newCommitMessage = sprintf(this.commitMessageI18n, { pageTitle: newTitle }, false);
this.commitMessage = newCommitMessage;
},
async initContentEditor() {
this.isContentEditorLoading = true;
this.useContentEditor = true;
const createEditor = await import(
/* webpackChunkName: 'content_editor' */ '~/content_editor/services/create_editor'
);
this.editor =
this.editor ||
(await createEditor.default({
renderMarkdown: (markdown) => this.getContentHTML(markdown),
onUpdate: () => this.handleContentChange(),
}));
await this.editor.setSerializedContent(this.content);
this.isContentEditorLoading = false;
},
switchToOldEditor() {
this.useContentEditor = false;
},
},
};
</script>
@ -99,6 +164,30 @@ export default {
class="wiki-form common-note-form gl-mt-3 js-quick-submit"
@submit="handleFormSubmit"
>
<gl-alert
v-if="isContentEditorActive"
class="gl-mb-6"
:dismissible="false"
variant="danger"
:primary-button-text="s__('WikiPage|Switch to old editor')"
@primaryAction="switchToOldEditor()"
>
<p>
{{
s__(
"WikiPage|You are editing this page with Content Editor. This editor is in beta and may not display the page's contents properly.",
)
}}
</p>
<p>
{{
s__(
"WikiPage|Switching to the old editor will discard any changes you've made in the new editor.",
)
}}
</p>
</gl-alert>
<input :value="csrfToken" type="hidden" name="authenticity_token" />
<input v-if="pageInfo.persisted" type="hidden" name="_method" value="put" />
<input
@ -135,8 +224,8 @@ export default {
'WikiPage|Tip: You can specify the full path for the new file. We will automatically create any missing directories.',
)
}}
<gl-link :href="helpPath" target="_blank" data-testid="wiki-title-help-link"
><gl-icon name="question-o" /> {{ __('More Information.') }}</gl-link
<gl-link :href="helpPath" target="_blank"
><gl-icon name="question-o" /> {{ s__('WikiPage|More Information.') }}</gl-link
>
</span>
</div>
@ -147,12 +236,26 @@ export default {
s__('WikiPage|Format')
}}</label>
</div>
<div class="col-sm-10">
<select id="wiki_format" v-model="format" class="form-control" name="wiki[format]">
<div class="col-sm-10 gl-display-flex gl-flex-wrap">
<select
id="wiki_format"
v-model="format"
class="form-control"
name="wiki[format]"
:disabled="isContentEditorActive"
>
<option v-for="(key, label) of formatOptions" :key="key" :value="key">
{{ label }}
</option>
</select>
<gl-button
v-if="showContentEditorButton"
category="secondary"
variant="confirm"
class="gl-mt-4"
@click="initContentEditor"
>{{ s__('WikiPage|Use new editor') }}</gl-button
>
</div>
</div>
<div class="form-group row">
@ -163,6 +266,7 @@ export default {
</div>
<div class="col-sm-10">
<markdown-field
v-if="!isContentEditorActive"
:markdown-preview-path="pageInfo.markdownPreviewPath"
:can-attach-file="true"
:enable-autocomplete="true"
@ -189,10 +293,17 @@ export default {
</textarea>
</template>
</markdown-field>
<div v-if="isContentEditorActive">
<gl-loading-icon v-if="isContentEditorLoading" class="bordered-box gl-w-full gl-py-6" />
<content-editor v-else :editor="editor" />
<input id="wiki_content" v-model.trim="content" type="hidden" name="wiki[content]" />
</div>
<div class="clearfix"></div>
<div class="error-alert"></div>
<div class="form-text gl-text-gray-600">
<div v-if="!isContentEditorActive" class="form-text gl-text-gray-600">
<gl-sprintf
:message="
s__(
@ -245,9 +356,7 @@ export default {
:disabled="!content || !title"
>{{ submitButtonText }}</gl-button
>
<gl-button :href="cancelFormPath" class="float-right" data-testid="wiki-cancel-button">{{
__('Cancel')
}}</gl-button>
<gl-button :href="cancelFormPath" class="float-right">{{ s__('WikiPage|Cancel') }}</gl-button>
</div>
</gl-form>
</template>

View File

@ -93,6 +93,7 @@ const fileExtensionIcons = {
pdf: 'pdf',
xlsx: 'table',
xls: 'table',
ods: 'table',
csv: 'table',
tsv: 'table',
vscodeignore: 'vscode',
@ -154,6 +155,7 @@ const fileExtensionIcons = {
gradle: 'gradle',
doc: 'word',
docx: 'word',
odt: 'word',
rtf: 'word',
cer: 'certificate',
cert: 'certificate',
@ -204,6 +206,7 @@ const fileExtensionIcons = {
pps: 'powerpoint',
ppam: 'powerpoint',
ppa: 'powerpoint',
odp: 'powerpoint',
webm: 'movie',
mkv: 'movie',
flv: 'movie',

View File

@ -95,7 +95,7 @@ $column-right-gradient: linear-gradient(to right, $gradient-dark-gray 0%, $gradi
@include gl-font-weight-normal;
&.label-dark {
@include gl-text-gray-900;
color: var(--gray-900, $gray-900);
}
&.label-bold {

View File

@ -6,4 +6,8 @@ class Projects::WikisController < Projects::ApplicationController
alias_method :container, :project
feature_category :wiki
before_action do
push_frontend_feature_flag(:wiki_content_editor, project, default_enabled: :yaml)
end
end

View File

@ -4,6 +4,24 @@ module Resolvers
class GroupPackagesResolver < BaseResolver
type Types::Packages::PackageType.connection_type, null: true
argument :sort, Types::Packages::PackageGroupSortEnum,
description: 'Sort packages by this criteria.',
required: false,
default_value: :created_desc
SORT_TO_PARAMS_MAP = {
created_desc: { order_by: 'created', sort: 'desc' },
created_asc: { order_by: 'created', sort: 'asc' },
name_desc: { order_by: 'name', sort: 'desc' },
name_asc: { order_by: 'name', sort: 'asc' },
version_desc: { order_by: 'version', sort: 'desc' },
version_asc: { order_by: 'version', sort: 'asc' },
type_desc: { order_by: 'type', sort: 'desc' },
type_asc: { order_by: 'type', sort: 'asc' },
project_path_desc: { order_by: 'project_path', sort: 'desc' },
project_path_asc: { order_by: 'project_path', sort: 'asc' }
}.freeze
def ready?(**args)
context[self.class] ||= { executions: 0 }
context[self.class][:executions] += 1
@ -12,10 +30,10 @@ module Resolvers
super
end
def resolve(**args)
def resolve(sort:)
return unless packages_available?
::Packages::GroupPackagesFinder.new(current_user, object).execute
::Packages::GroupPackagesFinder.new(current_user, object, SORT_TO_PARAMS_MAP[sort]).execute
end
private

View File

@ -4,10 +4,26 @@ module Resolvers
class ProjectPackagesResolver < BaseResolver
type Types::Packages::PackageType.connection_type, null: true
def resolve(**args)
argument :sort, Types::Packages::PackageSortEnum,
description: 'Sort packages by this criteria.',
required: false,
default_value: :created_desc
SORT_TO_PARAMS_MAP = {
created_desc: { order_by: 'created', sort: 'desc' },
created_asc: { order_by: 'created', sort: 'asc' },
name_desc: { order_by: 'name', sort: 'desc' },
name_asc: { order_by: 'name', sort: 'asc' },
version_desc: { order_by: 'version', sort: 'desc' },
version_asc: { order_by: 'version', sort: 'asc' },
type_desc: { order_by: 'type', sort: 'desc' },
type_asc: { order_by: 'type', sort: 'asc' }
}.freeze
def resolve(sort:)
return unless packages_available?
::Packages::PackagesFinder.new(object).execute
::Packages::PackagesFinder.new(object, SORT_TO_PARAMS_MAP.fetch(sort)).execute
end
private

View File

@ -130,7 +130,10 @@ module Types
field :milestone, Types::MilestoneType, null: true,
description: 'The milestone of the merge request.'
field :assignees, Types::UserType.connection_type, null: true, complexity: 5,
field :assignees,
type: Types::MergeRequests::AssigneeType.connection_type,
null: true,
complexity: 5,
description: 'Assignees of the merge request.'
field :reviewers,
type: Types::MergeRequests::ReviewerType.connection_type,

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

module Types
  module MergeRequests
    # GraphQL type for a user in the role of merge request assignee.
    #
    # Inherits every core user field from ::Types::UserType. The
    # InteractsWithMergeRequest concern adds the merge_request_interaction
    # field, and FindClosest supplies the closest_parent lookup that the
    # concern uses to locate the enclosing merge request in the query.
    class AssigneeType < ::Types::UserType
      include FindClosest
      include ::Types::MergeRequests::InteractsWithMergeRequest

      graphql_name 'MergeRequestAssignee'
      description 'A user assigned to a merge request.'

      # Only users the current user can read are exposed.
      authorize :read_user
    end
  end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true

module Types
  module MergeRequests
    # Shared concern for merge-request-scoped user types (assignees and
    # reviewers). Adds a merge_request_interaction field describing this
    # user's interactions with the merge request through which the user
    # was fetched.
    module InteractsWithMergeRequest
      extend ActiveSupport::Concern

      included do
        field :merge_request_interaction,
              type: ::Types::UserMergeRequestInteractionType,
              null: true,
              extras: [:parent],
              description: "Details of this user's interactions with the merge request."
      end

      # Resolver for the field declared above.
      #
      # Walks up the GraphQL query tree via closest_parent (provided by
      # FindClosest in the including type) to find the nearest
      # MergeRequestType node. Returns nil (field is null: true) when the
      # user was not reached through a merge request.
      def merge_request_interaction(parent:)
        merge_request = closest_parent(::Types::MergeRequestType, parent)
        return unless merge_request

        Users::MergeRequestInteraction.new(user: object, merge_request: merge_request)
      end
    end
  end
end

View File

@ -4,23 +4,11 @@ module Types
module MergeRequests
class ReviewerType < ::Types::UserType
include FindClosest
include ::Types::MergeRequests::InteractsWithMergeRequest
graphql_name 'MergeRequestReviewer'
description 'A user from whom a merge request review has been requested.'
description 'A user assigned to a merge request as a reviewer.'
authorize :read_user
field :merge_request_interaction,
type: ::Types::UserMergeRequestInteractionType,
null: true,
extras: [:parent],
description: "Details of this user's interactions with the merge request."
def merge_request_interaction(parent:)
merge_request = closest_parent(::Types::MergeRequestType, parent)
return unless merge_request
Users::MergeRequestInteraction.new(user: object, merge_request: merge_request)
end
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true

module Types
  module Packages
    # Sort options for packages listed at the group level.
    #
    # Inherits all values (CREATED/NAME/VERSION/TYPE, _ASC and _DESC) from
    # PackageSortEnum; only graphql_name/description differ, so the schema
    # exposes a distinct enum for group package queries that can later gain
    # group-only values (see below).
    class PackageGroupSortEnum < PackageSortEnum
      graphql_name 'PackageGroupSort'
      description 'Values for sorting group packages'

      # The following enums are not available until we enable the new Arel node:
      # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/58657#note_552632305
      # value 'PROJECT_PATH_DESC', 'Project by descending order.', value: :project_path_desc
      # value 'PROJECT_PATH_ASC', 'Project by ascending order.', value: :project_path_asc
    end
  end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true

module Types
  module Packages
    # Enum of sort orders for package lists. Each GraphQL enum value maps
    # to a symbol that the package resolvers translate into finder params
    # via their SORT_TO_PARAMS_MAP constants.
    class PackageSortEnum < BaseEnum
      graphql_name 'PackageSort'
      description 'Values for sorting package'

      # [enum name prefix, column named in the description, value symbol prefix]
      [
        %w[CREATED created_at created],
        %w[NAME name name],
        %w[VERSION version version],
        %w[TYPE type type]
      ].each do |prefix, column, key|
        value "#{prefix}_DESC", "Ordered by #{column} in descending order.", value: :"#{key}_desc"
        value "#{prefix}_ASC", "Ordered by #{column} in ascending order.", value: :"#{key}_asc"
      end
    end
  end
end

View File

@ -79,8 +79,14 @@ module Types
field :issue, Types::IssueType,
null: true,
description: 'Find an Issue.' do
argument :id, ::Types::GlobalIDType[::Issue], required: true, description: 'The global ID of the Issue.'
description: 'Find an issue.' do
argument :id, ::Types::GlobalIDType[::Issue], required: true, description: 'The global ID of the issue.'
end
field :merge_request, Types::MergeRequestType,
null: true,
description: 'Find a merge request.' do
argument :id, ::Types::GlobalIDType[::MergeRequest], required: true, description: 'The global ID of the merge request.'
end
field :instance_statistics_measurements,
@ -119,6 +125,13 @@ module Types
GitlabSchema.find_by_gid(id)
end
def merge_request(id:)
# TODO: remove this line when the compatibility layer is removed
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/257883
id = ::Types::GlobalIDType[::MergeRequest].coerce_isolated_input(id)
GitlabSchema.find_by_gid(id)
end
def milestone(id:)
# TODO: remove this line when the compatibility layer is removed
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/257883

View File

@ -0,0 +1,112 @@
# frozen_string_literal: true

module Types
  # GraphQL interface holding every core user field. Concrete user object
  # types (e.g. ::Types::UserType) implement this interface; resolve_type
  # below chooses the concrete type when only the interface is known.
  module UserInterface
    include Types::BaseInterface

    graphql_name 'User'
    description 'Representation of a GitLab user.'

    field :user_permissions,
          type: Types::PermissionTypes::User,
          description: 'Permissions for the current user on the resource.',
          null: false,
          method: :itself

    field :id,
          type: GraphQL::ID_TYPE,
          null: false,
          description: 'ID of the user.'

    field :bot,
          type: GraphQL::BOOLEAN_TYPE,
          null: false,
          description: 'Indicates if the user is a bot.',
          method: :bot?

    field :username,
          type: GraphQL::STRING_TYPE,
          null: false,
          description: 'Username of the user. Unique within this instance of GitLab.'

    field :name,
          type: GraphQL::STRING_TYPE,
          null: false,
          description: 'Human-readable name of the user.'

    field :state,
          type: Types::UserStateEnum,
          null: false,
          description: 'State of the user.'

    # Deprecated in favour of public_email below; kept for compatibility.
    field :email,
          type: GraphQL::STRING_TYPE,
          null: true,
          description: 'User email.', method: :public_email,
          deprecated: { reason: :renamed, replacement: 'User.publicEmail', milestone: '13.7' }

    field :public_email,
          type: GraphQL::STRING_TYPE,
          null: true,
          description: "User's public email."

    field :avatar_url,
          type: GraphQL::STRING_TYPE,
          null: true,
          description: "URL of the user's avatar."

    field :web_url,
          type: GraphQL::STRING_TYPE,
          null: false,
          description: 'Web URL of the user.'

    field :web_path,
          type: GraphQL::STRING_TYPE,
          null: false,
          description: 'Web path of the user.'

    field :todos,
          resolver: Resolvers::TodoResolver,
          description: 'To-do items of the user.'

    field :group_memberships,
          type: Types::GroupMemberType.connection_type,
          null: true,
          description: 'Group memberships of the user.'

    field :group_count,
          resolver: Resolvers::Users::GroupCountResolver,
          description: 'Group count for the user.',
          feature_flag: :user_group_counts

    field :status,
          type: Types::UserStatusType,
          null: true,
          description: 'User status.'

    field :location,
          type: ::GraphQL::STRING_TYPE,
          null: true,
          description: 'The location of the user.'

    field :project_memberships,
          type: Types::ProjectMemberType.connection_type,
          null: true,
          description: 'Project memberships of the user.'

    field :starred_projects,
          description: 'Projects starred by the user.',
          resolver: Resolvers::UserStarredProjectsResolver

    # Merge request field: MRs can be authored, assigned, or assigned-for-review:
    field :authored_merge_requests,
          resolver: Resolvers::AuthoredMergeRequestsResolver,
          description: 'Merge requests authored by the user.'

    field :assigned_merge_requests,
          resolver: Resolvers::AssignedMergeRequestsResolver,
          description: 'Merge requests assigned to the user.'

    field :review_requested_merge_requests,
          resolver: Resolvers::ReviewRequestedMergeRequestsResolver,
          description: 'Merge requests assigned to the user for review.'

    field :snippets,
          description: 'Snippets authored by the user.',
          resolver: Resolvers::Users::SnippetsResolver

    field :callouts,
          Types::UserCalloutType.connection_type,
          null: true,
          description: 'User callouts that belong to the user.'

    definition_methods do
      def resolve_type(object, context)
        # in the absence of other information, we cannot tell - just default to
        # the core user type.
        ::Types::UserType
      end
    end
  end
end

View File

@ -1,102 +1,13 @@
# frozen_string_literal: true
module Types
class UserType < BaseObject
graphql_name 'User'
description 'Representation of a GitLab user.'
class UserType < ::Types::BaseObject
graphql_name 'UserCore'
description 'Core represention of a GitLab user.'
implements ::Types::UserInterface
authorize :read_user
present_using UserPresenter
expose_permissions Types::PermissionTypes::User
field :id,
type: GraphQL::ID_TYPE,
null: false,
description: 'ID of the user.'
field :bot,
type: GraphQL::BOOLEAN_TYPE,
null: false,
description: 'Indicates if the user is a bot.',
method: :bot?
field :username,
type: GraphQL::STRING_TYPE,
null: false,
description: 'Username of the user. Unique within this instance of GitLab.'
field :name,
type: GraphQL::STRING_TYPE,
null: false,
description: 'Human-readable name of the user.'
field :state,
type: Types::UserStateEnum,
null: false,
description: 'State of the user.'
field :email,
type: GraphQL::STRING_TYPE,
null: true,
description: 'User email.', method: :public_email,
deprecated: { reason: :renamed, replacement: 'User.publicEmail', milestone: '13.7' }
field :public_email,
type: GraphQL::STRING_TYPE,
null: true,
description: "User's public email."
field :avatar_url,
type: GraphQL::STRING_TYPE,
null: true,
description: "URL of the user's avatar."
field :web_url,
type: GraphQL::STRING_TYPE,
null: false,
description: 'Web URL of the user.'
field :web_path,
type: GraphQL::STRING_TYPE,
null: false,
description: 'Web path of the user.'
field :todos,
resolver: Resolvers::TodoResolver,
description: 'To-do items of the user.'
field :group_memberships,
type: Types::GroupMemberType.connection_type,
null: true,
description: 'Group memberships of the user.'
field :group_count,
resolver: Resolvers::Users::GroupCountResolver,
description: 'Group count for the user.',
feature_flag: :user_group_counts
field :status,
type: Types::UserStatusType,
null: true,
description: 'User status.'
field :location,
type: ::GraphQL::STRING_TYPE,
null: true,
description: 'The location of the user.'
field :project_memberships,
type: Types::ProjectMemberType.connection_type,
null: true,
description: 'Project memberships of the user.'
field :starred_projects,
description: 'Projects starred by the user.',
resolver: Resolvers::UserStarredProjectsResolver
# Merge request field: MRs can be authored, assigned, or assigned-for-review:
field :authored_merge_requests,
resolver: Resolvers::AuthoredMergeRequestsResolver,
description: 'Merge requests authored by the user.'
field :assigned_merge_requests,
resolver: Resolvers::AssignedMergeRequestsResolver,
description: 'Merge requests assigned to the user.'
field :review_requested_merge_requests,
resolver: Resolvers::ReviewRequestedMergeRequestsResolver,
description: 'Merge requests assigned to the user for review.'
field :snippets,
description: 'Snippets authored by the user.',
resolver: Resolvers::Users::SnippetsResolver
field :callouts,
Types::UserCalloutType.connection_type,
null: true,
description: 'User callouts that belong to the user.'
end
end

View File

@ -122,14 +122,14 @@ class Packages::Package < ApplicationRecord
scope :select_distinct_name, -> { select(:name).distinct }
# Sorting
scope :order_created, -> { reorder('created_at ASC') }
scope :order_created_desc, -> { reorder('created_at DESC') }
scope :order_name, -> { reorder('name ASC') }
scope :order_name_desc, -> { reorder('name DESC') }
scope :order_version, -> { reorder('version ASC') }
scope :order_version_desc, -> { reorder('version DESC') }
scope :order_type, -> { reorder('package_type ASC') }
scope :order_type_desc, -> { reorder('package_type DESC') }
scope :order_created, -> { reorder(created_at: :asc) }
scope :order_created_desc, -> { reorder(created_at: :desc) }
scope :order_name, -> { reorder(name: :asc) }
scope :order_name_desc, -> { reorder(name: :desc) }
scope :order_version, -> { reorder(version: :asc) }
scope :order_version_desc, -> { reorder(version: :desc) }
scope :order_type, -> { reorder(package_type: :asc) }
scope :order_type_desc, -> { reorder(package_type: :desc) }
scope :order_project_name, -> { joins(:project).reorder('projects.name ASC') }
scope :order_project_name_desc, -> { joins(:project).reorder('projects.name DESC') }
scope :order_project_path, -> { joins(:project).reorder('projects.path ASC, id ASC') }

View File

@ -112,3 +112,5 @@ module AlertManagement
end
end
end
AlertManagement::AlertPresenter.prepend_if_ee('EE::AlertManagement::AlertPresenter')

View File

@ -8,7 +8,10 @@ module MergeRequests
# Executed when you do merge via GitLab UI
#
class MergeService < MergeRequests::MergeBaseService
include Gitlab::Utils::StrongMemoize
GENERIC_ERROR_MESSAGE = 'An error occurred while merging'
LEASE_TIMEOUT = 15.minutes.to_i
delegate :merge_jid, :state, to: :@merge_request
@ -18,6 +21,9 @@ module MergeRequests
return
end
return if merge_request.merged?
return unless exclusive_lease(merge_request.id).try_obtain
@merge_request = merge_request
@options = options
@ -34,6 +40,8 @@ module MergeRequests
log_info("Merge process finished on JID #{merge_jid} with state #{state}")
rescue MergeError => e
handle_merge_error(log_message: e.message, save_message_on_model: true)
ensure
exclusive_lease(merge_request.id).cancel
end
private
@ -146,5 +154,13 @@ module MergeRequests
# loaded from the database they're strings
params.with_indifferent_access[:sha] == merge_request.diff_head_sha
end
def exclusive_lease(merge_request_id)
strong_memoize(:"exclusive_lease_#{merge_request_id}") do
lease_key = ['merge_requests_merge_service', merge_request_id].join(':')
Gitlab::ExclusiveLease.new(lease_key, timeout: LEASE_TIMEOUT)
end
end
end
end

View File

@ -12,20 +12,28 @@ module MergeRequests
MAX_RETARGET_MERGE_REQUESTS = 4
def execute(merge_request)
return if merge_request.merged?
# Mark the merge request as merged, everything that happens afterwards is
# executed once
merge_request.mark_as_merged
close_issues(merge_request)
todo_service.merge_merge_request(merge_request, current_user)
create_event(merge_request)
create_note(merge_request)
todo_service.merge_merge_request(merge_request, current_user)
merge_request_activity_counter.track_merge_mr_action(user: current_user)
create_note(merge_request)
close_issues(merge_request)
notification_service.merge_mr(merge_request, current_user)
execute_hooks(merge_request, 'merge')
invalidate_cache_counts(merge_request, users: merge_request.assignees | merge_request.reviewers)
merge_request.update_project_counter_caches
delete_non_latest_diffs(merge_request)
cancel_review_app_jobs!(merge_request)
cleanup_environments(merge_request)
cleanup_refs(merge_request)
execute_hooks(merge_request, 'merge')
end
private

View File

@ -1890,7 +1890,7 @@
:urgency: :high
:resource_boundary: :unknown
:weight: 5
:idempotent:
:idempotent: true
:tags: []
- :name: merge_request_cleanup_refs
:feature_category: :code_review

View File

@ -7,11 +7,19 @@ class MergeWorker # rubocop:disable Scalability/IdempotentWorker
urgency :high
weight 5
loggable_arguments 2
idempotent!
deduplicate :until_executed, including_scheduled: true
def perform(merge_request_id, current_user_id, params)
params = params.with_indifferent_access
current_user = User.find(current_user_id)
merge_request = MergeRequest.find(merge_request_id)
begin
current_user = User.find(current_user_id)
merge_request = MergeRequest.find(merge_request_id)
rescue ActiveRecord::RecordNotFound
return
end
MergeRequests::MergeService.new(merge_request.target_project, current_user, params)
.execute(merge_request)

View File

@ -0,0 +1,5 @@
---
title: Add sorting for group and project packages type
merge_request: 58657
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Add merge request interaction details to MergeRequest.assignees
merge_request: 59770
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: LibreOffice/OpenOffice file extensions in icon map
merge_request: 59159
author: Holzfeind, Daniel Georg
type: changed

View File

@ -0,0 +1,5 @@
---
title: Allow merge request search via GraphQL
merge_request: 60190
author: Lee Tickett @leetickett
type: added

View File

@ -0,0 +1,5 @@
---
title: Make MergeService idempotent
merge_request: 55368
author:
type: performance

View File

@ -0,0 +1,5 @@
---
title: Remove artifact expiry backfill temp index.
merge_request: 54252
author:
type: changed

View File

@ -0,0 +1,7 @@
---
name: wiki_content_editor
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/57370
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/255919
group: group::editor
type: development
default_enabled: false

View File

@ -1,15 +1,15 @@
# frozen_string_literal: true
if Gitlab::Runtime.puma?
# This patch represents https://github.com/puma/puma/pull/2613. If
# this PR is accepted in the next Puma release, we can remove this
# entire file.
#
# The patch itself is quite large because the tempfile creation in
# Puma is inside these fairly long methods. The actual changes are
# just two lines, commented with 'GitLab' to make them easier to find.
raise "Remove this monkey patch: #{__FILE__}" unless Puma::Const::VERSION == '5.1.1'
# This is copied from https://github.com/puma/puma/blob/v5.1.1/lib/puma/client.rb,
# with two additions: both times we create a temporary file, we immediately
# call `#unlink`. This means that if the process gets terminated without being
# able to clean up itself, the temporary file will not linger on the file
# system. We will try to get this patch accepted upstream if it works for us
# (we just need to check if the temporary file responds to `#unlink` as that
# won't work on Windows, for instance).
module Puma
class Client
private
@ -65,7 +65,7 @@ if Gitlab::Runtime.puma?
if remain > MAX_BODY
@body = Tempfile.new(Const::PUMA_TMP_BASE)
@body.binmode
@body.unlink # This is the changed part
@body.unlink # GitLab: this is the changed part
@tempfile = @body
else
# The body[0,0] trick is to get an empty string in the same
@ -87,7 +87,7 @@ if Gitlab::Runtime.puma?
@body = Tempfile.new(Const::PUMA_TMP_BASE)
@body.binmode
@body.unlink # This is the changed part
@body.unlink # GitLab: this is the changed part
@tempfile = @body
@chunked_content_length = 0

View File

@ -1,6 +1,6 @@
---
key_path: usage_activity_by_stage_monthly.verify.ci_builds
description: Unique builds in project
description: Unique monthly builds in project
product_section: ops
product_stage: verify
product_group: group::continuous integration
@ -8,9 +8,12 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -1,6 +1,6 @@
---
key_path: usage_activity_by_stage_monthly.verify.ci_external_pipelines
description: Total pipelines in external repositories
description: Total pipelines in external repositories in a month
product_section: ops
product_stage: verify
product_group: group::continuous integration
@ -8,9 +8,12 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -1,6 +1,6 @@
---
key_path: usage_activity_by_stage_monthly.verify.ci_internal_pipelines
description: Total pipelines in GitLab repositories
description: Total pipelines in GitLab repositories in a month
product_section: ops
product_stage: verify
product_group: group::continuous integration
@ -8,9 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -2,15 +2,17 @@
key_path: usage_activity_by_stage_monthly.verify.ci_pipeline_config_auto_devops
description: Total pipelines from an Auto DevOps template
product_section: ops
product_stage: verify
product_group: group::continuous integration
product_category: continuous_integration
product_stage: configure
product_group: group::configure
product_category: auto_devops
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -1,6 +1,6 @@
---
key_path: usage_activity_by_stage_monthly.verify.ci_pipeline_config_repository
description: Total Pipelines from templates in repository
description: Total Monthly Pipelines from templates in repository
product_section: ops
product_stage: verify
product_group: group::continuous integration
@ -8,9 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -1,6 +1,6 @@
---
key_path: usage_activity_by_stage_monthly.verify.ci_pipeline_schedules
description: Pipeline schedules in GitLab
description: Total monthly Pipeline schedules in GitLab
product_section: ops
product_stage: verify
product_group: group::continuous integration
@ -8,9 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -1,6 +1,6 @@
---
key_path: usage_activity_by_stage_monthly.verify.ci_pipelines
description: " Distinct users triggering pipelines in a month"
description: "Distinct users triggering pipelines in a month"
product_section: ops
product_stage: verify
product_group: group::continuous integration
@ -8,10 +8,12 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
- free

View File

@ -8,7 +8,7 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: database
distribution:
- ce
- ee
@ -16,4 +16,3 @@ tier:
- free
- premium
- ultimate
skip_validation: true

View File

@ -11,6 +11,8 @@ time_frame: all
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -11,6 +11,8 @@ time_frame: all
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -13,4 +13,3 @@ distribution:
- ce
tier:
- free
skip_validation: true

View File

@ -2,15 +2,17 @@
key_path: counts.ci_pipeline_config_auto_devops
description: Total pipelines from an Auto DevOps template
product_section: ops
product_stage: verify
product_group: group::continuous integration
product_category: continuous_integration
product_stage: configure
product_group: group::configure
product_category: auto_devops
value_type: number
status: data_available
time_frame: all
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -11,6 +11,8 @@ time_frame: all
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -11,6 +11,8 @@ time_frame: all
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -11,6 +11,8 @@ time_frame: all
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -11,6 +11,8 @@ time_frame: all
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -1,6 +1,6 @@
---
key_path: usage_activity_by_stage.verify.ci_builds
description: Unique builds in project
description: Unique count of builds in project
product_section: ops
product_stage: verify
product_group: group::continuous integration
@ -8,9 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: all
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -8,9 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: all
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -8,9 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: all
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -2,15 +2,17 @@
key_path: usage_activity_by_stage.verify.ci_pipeline_config_auto_devops
description: Total pipelines from an Auto DevOps template
product_section: ops
product_stage: verify
product_group: group::continuous integration
product_category: continuous_integration
product_stage: configure
product_group: group::configure
product_category: auto_devops
value_type: number
status: data_available
time_frame: all
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -8,9 +8,12 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: all
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -8,9 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: all
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -8,10 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: all
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -8,9 +8,11 @@ product_category: continuous_integration
value_type: number
status: data_available
time_frame: all
data_source:
data_source: database
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
# Drops the temporary partial index on ci_job_artifacts that supported the
# artifact-expiry backfill (presumably no longer needed now that the backfill
# is done — see the "Remove artifact expiry backfill temp index" changelog).
class RemoveArtifactExpiryTempIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
# Concurrent index creation/removal cannot run inside a transaction block.
disable_ddl_transaction!
INDEX_NAME = 'expired_artifacts_temp_index'
# Partial-index predicate; kept identical to the original definition so that
# `down` recreates the index exactly as it was.
INDEX_CONDITION = "expire_at IS NULL AND date(created_at AT TIME ZONE 'UTC') < '2020-06-22'::date"
def up
remove_concurrent_index_by_name :ci_job_artifacts, INDEX_NAME
end
# Rolls back by rebuilding the same partial index on (id, created_at).
def down
add_concurrent_index(:ci_job_artifacts, %i(id created_at), where: INDEX_CONDITION, name: INDEX_NAME)
end
end

View File

@ -0,0 +1 @@
f72f0a31bca545d2528030019695b03e0858d7ae9a0fb32d407c25580731fa6b

View File

@ -21800,8 +21800,6 @@ CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_and_note_id_index ON epic_user
CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_index ON epic_user_mentions USING btree (epic_id) WHERE (note_id IS NULL);
CREATE INDEX expired_artifacts_temp_index ON ci_job_artifacts USING btree (id, created_at) WHERE ((expire_at IS NULL) AND (date(timezone('UTC'::text, created_at)) < '2020-06-22'::date));
CREATE INDEX finding_evidence_requests_on_finding_evidence_id ON vulnerability_finding_evidence_requests USING btree (vulnerability_finding_evidence_id);
CREATE INDEX finding_evidence_responses_on_finding_evidences_id ON vulnerability_finding_evidence_responses USING btree (vulnerability_finding_evidence_id);

View File

@ -18,7 +18,7 @@ In order to enable the encrypted configuration settings, a new base key needs to
**Omnibus Installation**
Starting with 13.7 the new secret is automatically generated for you, but you will need to ensure your
Starting with 13.7 the new secret is automatically generated for you, but you need to ensure your
`/etc/gitlab/gitlab-secrets.json` contains the same values on all nodes.
**GitLab Cloud Native Helm Chart**
@ -34,4 +34,4 @@ The new secret can be generated by running:
bundle exec rake gitlab:env:info RAILS_ENV=production GITLAB_GENERATE_ENCRYPTED_SETTINGS_KEY_BASE=true
```
This will print general information on the GitLab instance, but will also cause the key to be generated in `<path-to-gitlab-rails>/config/secrets.yml`
This prints general information on the GitLab instance, but also causes the key to be generated in `<path-to-gitlab-rails>/config/secrets.yml`

File diff suppressed because it is too large Load Diff

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

View File

@ -0,0 +1,205 @@
---
stage: none
group: unassigned
comments: false
description: 'Improve scalability of GitLab CI/CD'
---
# Next CI/CD scale target: 20M builds per day by 2024
## Summary
GitLab CI/CD is one of the most data and compute intensive components of GitLab.
Since its [initial release in November 2012](https://about.gitlab.com/blog/2012/11/13/continuous-integration-server-from-gitlab/),
the CI/CD subsystem has evolved significantly. It was [integrated into GitLab in September 2015](https://about.gitlab.com/releases/2015/09/22/gitlab-8-0-released/)
and has become [one of the most beloved CI/CD solutions](https://about.gitlab.com/blog/2017/09/27/gitlab-leader-continuous-integration-forrester-wave/).
GitLab CI/CD has come a long way since the initial release, but the design of
the data storage for pipeline builds remains almost the same since 2012. We
store all the builds in PostgreSQL in `ci_builds` table, and because we are
creating more than [2 million builds each day on GitLab.com](https://docs.google.com/spreadsheets/d/17ZdTWQMnTHWbyERlvj1GA7qhw_uIfCoI5Zfrrsh95zU),
we are reaching database limits that are slowing our development velocity down.
On February 1st, 2021, a billionth CI/CD job was created and the number of
builds is growing exponentially. We will run out of the available primary keys
for builds before December 2021 unless we improve the database model used to
store CI/CD data.
We expect to see 20M builds created daily on GitLab.com in the first half of
2024.
![ci_builds cumulative with forecast](ci_builds_cumulative_forecast.png)
## Goals
**Enable future growth by making processing 20M builds in a day possible.**
## Challenges
The current state of CI/CD product architecture needs to be updated if we want
to sustain future growth.
### We are running out of the capacity to store primary keys
The primary key in `ci_builds` table is an integer generated in a sequence.
Historically, Rails used to use [integer](https://www.postgresql.org/docs/9.1/datatype-numeric.html)
type when creating primary keys for a table. We did use the default when we
[created the `ci_builds` table in 2012](https://gitlab.com/gitlab-org/gitlab/-/blob/046b28312704f3131e72dcd2dbdacc5264d4aa62/db/ci/migrate/20121004165038_create_builds.rb).
[The behavior of Rails has changed](https://github.com/rails/rails/pull/26266)
since the release of Rails 5. The framework is now using bigint type that is 8
bytes long, however we have not migrated primary keys for `ci_builds` table to
bigint yet.
We will run out of the capacity of the integer type to store primary keys in
`ci_builds` table before December 2021. When it happens without a viable
workaround or an emergency plan, GitLab.com will go down.
`ci_builds` is just one of the tables that are running out of the primary keys
available in Int4 sequence. There are multiple other tables storing CI/CD data
that have the same problem.
Primary keys problem will be tackled by our Database Team.
### The table is too large
There is more than a billion rows in `ci_builds` table. We store more than 2
terabytes of data in that table, and the total size of indexes is more than 1
terabyte (as of February 2021).
This amount of data contributes to significant performance problems we
experience on our primary PostgreSQL database.
Most of the problems are related to how the PostgreSQL database works internally,
and how it makes use of resources on the node the database runs on. We are at
the limits of vertical scaling of the primary database nodes and we frequently
see a negative impact of the `ci_builds` table on the overall performance,
stability, scalability and predictability of the database GitLab.com depends
on.
The size of the table also hinders development velocity because queries that
seem fine in the development environment may not work on GitLab.com. The
difference in the dataset size between the environments makes it difficult to
predict the performance of even the most simple queries.
We also expect a significant, exponential growth in the upcoming years.
One of the forecasts done using [Facebook's
Prophet](https://facebook.github.io/prophet/) shows that in the first half of
2024 we expect to see 20M builds created on GitLab.com each day. In comparison
to around 2M created today, this is a 10x growth our product might need to
sustain in upcoming years.
![ci_builds daily forecast](ci_builds_daily_forecast.png)
### Queuing mechanisms are using the large table
Because of how large the table is, mechanisms that we use to build queues of
pending builds (there is more than one queue), are not very efficient. Pending
builds represent a small fraction of what we store in the `ci_builds` table,
yet we need to find them in this big dataset to determine an order in which we
want to process them.
This mechanism is very inefficient, and it has been causing problems on the
production environment frequently. This usually results in a significant drop
of the CI/CD apdex score, and sometimes even causes a significant performance
degradation in the production environment.
There are multiple other strategies that can improve performance and
reliability. We can use [Redis
queuing](https://gitlab.com/gitlab-org/gitlab/-/issues/322972), or [a separate
table that will accelerate SQL queries used to build
queues](https://gitlab.com/gitlab-org/gitlab/-/issues/322766) and we want to
explore them.
### Moving big amounts of data is challenging
We store a significant amount of data in `ci_builds` table. Some of the columns
in that table store a serialized user-provided data. Column `ci_builds.options`
stores more than 600 gigabytes of data, and `ci_builds.yaml_variables` more
than 300 gigabytes (as of February 2021).
It is a lot of data that needs to be reliably moved to a different place.
Unfortunately, right now, our [background
migrations](https://docs.gitlab.com/ee/development/background_migrations.html)
are not reliable enough to migrate this amount of data at scale. We need to
build mechanisms that will give us confidence in moving this data between
columns, tables, partitions or database shards.
Effort to improve background migrations will be owned by our Database Team.
### Development velocity is negatively affected
Team members and the wider community are struggling to contribute to the
Verify area, because we have restricted the possibility of extending `ci_builds`
even further. Our static analysis tools prevent adding more columns to this
table. Adding new queries is unpredictable because of the size of the dataset
and the amount of queries executed using the table. This significantly hinders
the development velocity and contributes to incidents on the production
environment.
## Proposal
Making GitLab CI/CD product ready for the scale we expect to see in the
upcoming years is a multi-phase effort.
First, we want to focus on things that are urgently needed right now. We need
to fix primary keys overflow risk and unblock other teams that are working on
database partitioning and sharding.
We want to improve the situation around bottlenecks that are already known, like
queuing mechanisms using the large table and things that are holding other
teams back.
Extending CI/CD metrics is important to get a better sense of how the system
performs and what growth we should expect. This will make it easier for us
to identify bottlenecks and perform more advanced capacity planning.
As we work on first iterations we expect our Database Sharding team and
Database Scalability Working Group to make progress on patterns we will be able
to use to partition the large CI/CD dataset. We consider the strong time-decay
effect, related to the diminishing importance of pipelines with time, as an
opportunity we might want to seize.
## Iterations
Work required to achieve our next CI/CD scaling target is tracked in the
[GitLab CI/CD 20M builds per day scaling
target](https://gitlab.com/groups/gitlab-org/-/epics/5745) epic.
## Status
In progress.
## Who
Proposal:
<!-- vale gitlab.Spelling = NO -->
| Role | Who
|------------------------------|-------------------------|
| Author | Grzegorz Bizon |
| Architecture Evolution Coach | Kamil Trzciński |
| Engineering Leader | Darby Frey |
| Product Manager | Jackie Porter |
| Domain Expert / Verify | Fabio Pitino |
| Domain Expert / Database | Jose Finotto |
| Domain Expert / PostgreSQL | Nikolay Samokhvalov |
DRIs:
| Role | Who
|------------------------------|------------------------|
| Leadership | Darby Frey |
| Product | Jackie Porter |
| Engineering | Grzegorz Bizon |
Domain experts:
| Area | Who
|------------------------------|------------------------|
| Domain Expert / Verify | Fabio Pitino |
| Domain Expert / Database | Jose Finotto |
| Domain Expert / PostgreSQL | Nikolay Samokhvalov |
<!-- vale gitlab.Spelling = YES -->

View File

@ -94,3 +94,5 @@ To see the needs visualization, click on the **Needs** tab when viewing a pipeli
Clicking a node highlights all the job paths it depends on.
![Needs visualization with path highlight](img/dag_graph_example_clicked_v13_1.png)
You can also see `needs` relationships in [full pipeline graphs](../pipelines/index.md#view-full-pipeline-graph).

View File

@ -65,7 +65,7 @@ you can also see the reason it failed on the Job detail page.
The order of jobs in a pipeline depends on the type of pipeline graph.
- For [regular pipeline graphs](../pipelines/index.md#regular-pipeline-graphs), jobs are sorted by name.
- For [full pipeline graphs](../pipelines/index.md#view-full-pipeline-graph), jobs are sorted by name.
- For [pipeline mini graphs](../pipelines/index.md#pipeline-mini-graphs), jobs are sorted by severity and then by name.
The order of severity is:

Binary file not shown.

After

Width:  |  Height:  |  Size: 42 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

View File

@ -325,23 +325,81 @@ Pipelines can be complex structures with many sequential and parallel jobs.
To make it easier to understand the flow of a pipeline, GitLab has pipeline graphs for viewing pipelines
and their statuses.
Pipeline graphs can be displayed in two different ways, depending on the page you
Pipeline graphs can be displayed as a large graph or a miniature representation, depending on the page you
access the graph from.
GitLab capitalizes the stages' names in the pipeline graphs.
### Regular pipeline graphs
### View full pipeline graph
> - [Visualization improved](https://gitlab.com/gitlab-org/gitlab/-/issues/276949) in GitLab 13.11.
> - [Visualization improvements introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/276949) in GitLab 13.11.
Regular pipeline graphs show the names of the jobs in each stage. Regular pipeline graphs can
be found when you are on a [single pipeline page](#view-pipelines). For example:
The [pipeline details page](#view-pipelines) displays the full pipeline graph of
all the jobs in the pipeline.
![Pipelines example](img/pipelines_v13_11.png)
You can group the jobs by:
- Stage, which arranges jobs in the same stage together in the same column.
![jobs grouped by stage](img/pipelines_graph_stage_view_v13_12.png)
- [Job dependencies](#view-job-dependencies-in-the-pipeline-graph), which arranges
jobs based on their [`needs`](../yaml/README.md#needs) dependencies.
[Multi-project pipeline graphs](../multi_project_pipelines.md#multi-project-pipeline-visualization) help
you visualize the entire pipeline, including all cross-project inter-dependencies. **(PREMIUM)**
### View job dependencies in the pipeline graph
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/298973) in GitLab 13.12.
> - [Deployed behind a feature flag](../../user/feature_flags.md), disabled by default.
> - Disabled on GitLab.com.
> - Not recommended for production use.
> - To use in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-job-dependency-view). **(FREE SELF)**
This in-development feature might not be available for your use. There can be
[risks when enabling features still in development](../../user/feature_flags.md#risks-when-enabling-features-still-in-development).
Refer to this feature's version history for more details.
You can arrange jobs in the pipeline graph based on their [`needs`](../yaml/README.md#needs)
dependencies.
Jobs in the leftmost column run first, and jobs that depend on them are grouped in the next columns.
For example, `build-job2` depends only on jobs in the first column, so it displays
in the second column from the left. `deploy-job2` depends on jobs in both the first
and second column and displays in the third column:
![jobs grouped by needs dependency](img/pipelines_graph_dependency_view_v13_12.png)
To add lines that show the `needs` relationships between jobs, select the **Show dependencies** toggle.
These lines are similar to the [needs visualization](../directed_acyclic_graph/index.md#needs-visualization):
![jobs grouped by needs dependency with lines displayed](img/pipelines_graph_dependency_view_links_v13_12.png)
To see the full `needs` dependency tree for a job, hover over it:
![single job dependency tree highlighted](img/pipelines_graph_dependency_view_hover_v13_12.png)
#### Enable or disable job dependency view **(FREE SELF)**
The job dependency view is under development and not ready for production use. It is
deployed behind a feature flag that is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
can enable it.
To enable it:
```ruby
Feature.enable(:pipeline_graph_layers_view)
```
To disable it:
```ruby
Feature.disable(:pipeline_graph_layers_view)
```
### Pipeline mini graphs
Pipeline mini graphs take less space and can tell you at a
@ -356,6 +414,8 @@ Pipeline mini graphs allow you to see all related jobs for a single commit and t
of each stage of your pipeline. This allows you to quickly see what failed and
fix it.
Pipeline mini graphs only display jobs by stage.
Stages in pipeline mini graphs are collapsible. Hover your mouse over them and click to expand their jobs.
| Mini graph | Mini graph expanded |

View File

@ -486,7 +486,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `counts.ci_external_pipelines`
@ -510,7 +510,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `counts.ci_pipeline_config_auto_devops`
@ -518,11 +518,11 @@ Total pipelines from an Auto DevOps template
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_all/20210216175516_ci_pipeline_config_auto_devops.yml)
Group: `group::continuous integration`
Group: `group::configure`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `counts.ci_pipeline_config_repository`
@ -534,7 +534,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `counts.ci_pipeline_schedules`
@ -546,7 +546,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `counts.ci_runners`
@ -558,7 +558,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `counts.ci_triggers`
@ -570,7 +570,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `counts.clusters`
@ -16128,7 +16128,7 @@ Tiers: `free`
### `usage_activity_by_stage.verify.ci_builds`
Unique builds in project
Unique count of builds in project
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_all/20210216175525_ci_builds.yml)
@ -16136,7 +16136,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage.verify.ci_external_pipelines`
@ -16148,7 +16148,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage.verify.ci_internal_pipelines`
@ -16160,7 +16160,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage.verify.ci_pipeline_config_auto_devops`
@ -16168,11 +16168,11 @@ Total pipelines from an Auto DevOps template
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_all/20210216175531_ci_pipeline_config_auto_devops.yml)
Group: `group::continuous integration`
Group: `group::configure`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage.verify.ci_pipeline_config_repository`
@ -16184,7 +16184,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage.verify.ci_pipeline_schedules`
@ -16196,7 +16196,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage.verify.ci_pipelines`
@ -16208,7 +16208,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage.verify.ci_triggers`
@ -16220,7 +16220,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage.verify.clusters_applications_runner`
@ -18132,7 +18132,7 @@ Tiers: `free`
### `usage_activity_by_stage_monthly.verify.ci_builds`
Unique builds in project
Unique monthly builds in project
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_28d/20210216175542_ci_builds.yml)
@ -18140,11 +18140,11 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage_monthly.verify.ci_external_pipelines`
Total pipelines in external repositories
Total pipelines in external repositories in a month
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_28d/20210216175544_ci_external_pipelines.yml)
@ -18152,11 +18152,11 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage_monthly.verify.ci_internal_pipelines`
Total pipelines in GitLab repositories
Total pipelines in GitLab repositories in a month
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_28d/20210216175546_ci_internal_pipelines.yml)
@ -18164,7 +18164,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage_monthly.verify.ci_pipeline_config_auto_devops`
@ -18172,15 +18172,15 @@ Total pipelines from an Auto DevOps template
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_28d/20210216175548_ci_pipeline_config_auto_devops.yml)
Group: `group::continuous integration`
Group: `group::configure`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage_monthly.verify.ci_pipeline_config_repository`
Total Pipelines from templates in repository
Total Monthly Pipelines from templates in repository
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_28d/20210216175550_ci_pipeline_config_repository.yml)
@ -18188,11 +18188,11 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage_monthly.verify.ci_pipeline_schedules`
Pipeline schedules in GitLab
Total monthly Pipeline schedules in GitLab
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_28d/20210216175552_ci_pipeline_schedules.yml)
@ -18200,11 +18200,11 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage_monthly.verify.ci_pipelines`
Distinct users triggering pipelines in a month
Distinct users triggering pipelines in a month
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/counts_28d/20210216175554_ci_pipelines.yml)
@ -18212,7 +18212,7 @@ Group: `group::continuous integration`
Status: `data_available`
Tiers: `free`
Tiers: `free`, `premium`, `ultimate`, `free`
### `usage_activity_by_stage_monthly.verify.ci_triggers`
@ -18240,7 +18240,7 @@ Tiers: `free`, `premium`, `ultimate`
### `usage_activity_by_stage_monthly.verify.projects_reporting_ci_cd_back_to_github`
Projects with a GitHub service pipeline enabled
Projects with a GitHub repository mirror pipeline enabled
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/config/metrics/counts_28d/20210216175558_projects_reporting_ci_cd_back_to_github.yml)

View File

@ -31,20 +31,20 @@ Because GitLab is already installed in a pre-configured image, all you have to d
create a new VM:
1. [Visit the GitLab offering in the marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/gitlabinc1586447921813.gitlabee?tab=Overview)
1. Select **Get it now** and you will be presented with the **Create this app in Azure** window.
1. Select **Get it now** and the **Create this app in Azure** window opens.
Select **Continue**.
1. Select one of the following options from the Azure portal:
- Select **Create** to create a VM from scratch.
- Select **Start with a pre-set configuration** to get started with some
pre-configured options. You can modify these configurations at any time.
For the sake of this guide, we'll create the VM from scratch, so
For the sake of this guide, let's create the VM from scratch, so
select **Create**.
NOTE:
Be aware that while your VM is active (known as "allocated"), it incurs
compute charges for which you'll be billed. Even if you're using the
free trial credits, you'll want to know
Be aware that Azure incurs compute charges whenever your VM is
active (known as "allocated"), even if you're using free trial
credits.
[how to properly shutdown an Azure VM to save money](https://build5nines.com/properly-shutdown-azure-vm-to-save-money/).
See the [Azure pricing calculator](https://azure.microsoft.com/en-us/pricing/calculator/)
to learn how much resources can cost.
@ -68,7 +68,7 @@ The first items you need to configure are the basic settings of the underlying v
is covered by the `D4s_v3` size, select that option.
1. Set the authentication type to **SSH public key**.
1. Enter a user name or leave the one that is automatically created. This is
the user you'll use to connect to the VM through SSH. By default, the user
the user Azure uses to connect to the VM through SSH. By default, the user
has root access.
1. Determine if you want to provide your own SSH key or let Azure create one for you.
Read the [SSH documentation](../../ssh/README.md) to learn more about how to set up SSH
@ -103,7 +103,7 @@ The GitLab image in the marketplace has the following ports open by default:
| 22 | Enable our VM to respond to SSH connection requests, allowing public access (with authentication) to remote terminal sessions. |
If you want to change the ports or add any rules, you can do it
after the VM is created by going to the Networking settings in the left sidebar,
after the VM is created by selecting Networking settings in the left sidebar,
while in the VM dashboard.
### Configure the Management tab
@ -126,13 +126,13 @@ resources. You don't need to change the default settings.
The final tab presents you with all of your selected options,
where you can review and modify your choices from the
previous steps. Azure will run validation tests in the background,
previous steps. Azure runs validation tests in the background,
and if you provided all of the required settings, you can
create the VM.
After you select **Create**, if you had opted for Azure to create an SSH key pair
for you, you'll be asked to download the private SSH key. Download the key, as you'll
need it to SSH into the VM.
for you, a prompt appears to download the private SSH key. Download the key, as it's
needed to SSH into the VM.
After you download the key, the deployment begins.
@ -153,11 +153,11 @@ to assign a descriptive DNS name to the VM:
1. From the VM dashboard, select **Configure** under **DNS name**.
1. Enter a descriptive DNS name for your instance in the **DNS name label** field,
for example `gitlab-prod`. This will make the VM accessible at
for example `gitlab-prod`. This makes the VM accessible at
`gitlab-prod.eastus.cloudapp.azure.com`.
1. Select **Save** for the changes to take effect.
Eventually, you'll want to use your own domain name. To do this, you need to add a DNS `A` record
Eventually, most users want to use their own domain name. For you to do this, you need to add a DNS `A` record
with your domain registrar that points to the public IP address of your Azure VM.
You can use [Azure's DNS](https://docs.microsoft.com/en-us/azure/dns/dns-delegate-domain-azure-dns)
or some [other registrar](https://docs.gitlab.com/omnibus/settings/dns.html).
@ -165,15 +165,15 @@ or some [other registrar](https://docs.gitlab.com/omnibus/settings/dns.html).
### Change the GitLab external URL
GitLab uses `external_url` in its configuration file to set up the domain name.
If you don't set this up, when you visit the Azure friendly name, you'll
instead be redirected to the public IP.
If you don't set this up, when you visit the Azure friendly name, the browser will
redirect you to the public IP.
To set up the GitLab external URL:
1. Connect to GitLab through SSH by going to **Settings > Connect** from the VM
dashboard, and follow the instructions. Remember to sign in with the username
and SSH key you specified when you [created the VM](#configure-the-basics-tab).
The Azure VM domain name will be the one you
The Azure VM domain name is the one you
[set up previously](#set-up-a-domain-name). If you didn't set up a domain name for
your VM, you can use the IP address in its place.
@ -189,10 +189,10 @@ To set up the GitLab external URL:
1. Open `/etc/gitlab/gitlab.rb` with your editor.
1. Find `external_url` and replace it with your own domain name. For the sake
of this example, we'll use the friendly domain name that Azure set up.
If you use `https` in the URL, Let's Encrypt will be
[automatically enabled](https://docs.gitlab.com/omnibus/settings/ssl.html#lets-encrypt-integration),
and you'll have HTTPS by default:
of this example, use the default domain name Azure sets up.
Using `https` in the URL
[automatically enables](https://docs.gitlab.com/omnibus/settings/ssl.html#lets-encrypt-integration),
Let's Encrypt, and sets HTTPS by default:
```ruby
external_url 'https://gitlab-prod.eastus.cloudapp.azure.com'
@ -221,7 +221,7 @@ You can now visit GitLab with your browser at the new external URL.
Use the domain name you set up earlier to visit your new GitLab instance
in your browser. In this example, it's `https://gitlab-prod.eastus.cloudapp.azure.com`.
The first thing you'll see is the sign-in page. GitLab creates an admin user by default.
The first thing that appears is the sign-in page. GitLab creates an admin user by default.
The credentials are:
- Username: `root`
@ -239,7 +239,7 @@ in this section whenever you need to update GitLab.
### Check the current version
To determine the version of GitLab you're currently running,
go to the **{admin}** **Admin Area**, and you will find the version
go to the **{admin}** **Admin Area**, and find the version
under the **Components** table.
If there's a newer available version of GitLab that contains one or more
@ -259,7 +259,7 @@ To update GitLab to the latest version:
```
This command updates GitLab and its associated components to the latest versions,
and can take time to complete. You'll see various update tasks being
and can take time to complete. During this time, the terminal shows various update tasks being
completed in your terminal.
NOTE:
@ -267,8 +267,8 @@ To update GitLab to the latest version:
`E: The repository 'https://packages.gitlab.com/gitlab/gitlab-ee/debian buster InRelease' is not signed.`,
see the [troubleshooting section](#update-the-gpg-key-for-the-gitlab-repositories).
1. After the update process is complete, you'll see a message like the
following:
1. After the update process is complete, a message like the
following appears:
```plaintext
Upgrade complete! If your GitLab server is misbehaving try running
@ -300,7 +300,7 @@ GPG key.
The pre-configured GitLab image in Azure (provided by Bitnami) uses
a GPG key [deprecated in April 2020](https://about.gitlab.com/blog/2020/03/30/gpg-key-for-gitlab-package-repositories-metadata-changing/).
If you try to update the repositories, you'll get the following error:
If you try to update the repositories, the system returns the following error:
<!-- vale gitlab.ReferenceLinks = NO -->

View File

@ -599,7 +599,6 @@ repository's root as `.gitlab-api-fuzzing.yml`.
| `FUZZAPI_TARGET_URL` | Base URL of API testing target. |
|[`FUZZAPI_CONFIG`](#configuration-files) | API Fuzzing configuration file. Defaults to `.gitlab-apifuzzer.yml`. |
|[`FUZZAPI_PROFILE`](#configuration-files) | Configuration profile to use during testing. Defaults to `Quick`. |
| `FUZZAPI_REPORT` | Scan report filename. Defaults to `gl-api_fuzzing-report.xml`. |
|[`FUZZAPI_OPENAPI`](#openapi-specification) | OpenAPI specification file or URL. |
|[`FUZZAPI_HAR`](#http-archive-har) | HTTP Archive (HAR) file. |
|[`FUZZAPI_POSTMAN_COLLECTION`](#postman-collection) | Postman Collection file. |
@ -611,18 +610,6 @@ repository's root as `.gitlab-api-fuzzing.yml`.
|[`FUZZAPI_HTTP_USERNAME`](#http-basic-authentication) | Username for HTTP authentication. |
|[`FUZZAPI_HTTP_PASSWORD`](#http-basic-authentication) | Password for HTTP authentication. |
<!--|[`FUZZAPI_D_TARGET_IMAGE`](#target-container) |API target docker image |
|[`FUZZAPI_D_TARGET_ENV`](#target-container) |Docker environment options |
|[`FUZZAPI_D_TARGET_VOLUME`](#target-container) | Docker volume options |
|[`FUZZAPI_D_TARGET_PORTS`](#target-container) |Docker port options |
| `FUZZAPI_D_WORKER_IMAGE` |Custom worker docker image |
| `FUZZAPI_D_WORKER_ENV` |Custom worker docker environment options |
| `FUZZAPI_D_WORKER_VOLUME` |Custom worker docker volume options |
| `FUZZAPI_D_WORKER_PORTS` |Custom worker docker port options |
| `FUZZAPI_D_NETWORK` |Name of docker network, defaults to "testing-net"|
| `FUZZAPI_D_PRE_SCRIPT` |Pre script runs after docker login and docker network create, but before starting the scanning image container.|
| `FUZZAPI_D_POST_SCRIPT` |Post script runs after scanning image container is started. This is the place to start your target(s) and kick off scanning when using an advanced configuration.| -->
### Overrides
API Fuzzing provides a method to add or override specific items in your request, for example:

View File

@ -30,6 +30,8 @@ module Gitlab
Converter.new.convert(ansi, state)
end
Result = Struct.new(:html, :state, :append, :truncated, :offset, :size, :total, keyword_init: true) # rubocop:disable Lint/StructNewOverride
class Converter
def on_0(_)
reset
@ -278,9 +280,7 @@ module Gitlab
close_open_tags
# TODO: replace OpenStruct with a better type
# https://gitlab.com/gitlab-org/gitlab/issues/34305
OpenStruct.new(
Ansi2html::Result.new(
html: @out.force_encoding(Encoding.default_external),
state: state,
append: append,

View File

@ -5,41 +5,13 @@
# How to set: https://docs.gitlab.com/ee/ci/yaml/#variables
variables:
FUZZAPI_PROFILE: Quick
FUZZAPI_VERSION: "1"
FUZZAPI_CONFIG: .gitlab-api-fuzzing.yml
FUZZAPI_TIMEOUT: 30
FUZZAPI_REPORT: gl-api-fuzzing-report.json
FUZZAPI_REPORT_ASSET_PATH: assets
#
# Wait up to 5 minutes for API Fuzzer and target url to become
# available (non 500 response to HTTP(s))
FUZZAPI_SERVICE_START_TIMEOUT: "300"
#
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
FUZZAPI_IMAGE: ${SECURE_ANALYZERS_PREFIX}/api-fuzzing:${FUZZAPI_VERSION}
#
apifuzzer_fuzz_unlicensed:
stage: fuzz
allow_failure: true
rules:
- if: '$GITLAB_FEATURES !~ /\bapi_fuzzing\b/ && $API_FUZZING_DISABLED == null'
- when: never
script:
- |
echo "Error: Your GitLab project is not licensed for API Fuzzing."
- exit 1
apifuzzer_fuzz:
stage: fuzz
image: $FUZZAPI_IMAGE
variables:
FUZZAPI_PROJECT: $CI_PROJECT_PATH
FUZZAPI_API: http://localhost:80
FUZZAPI_NEW_REPORT: 1
FUZZAPI_LOG_SCANNER: gl-apifuzzing-api-scanner.log
TZ: America/Los_Angeles
allow_failure: true
rules:
- if: $API_FUZZING_DISABLED
@ -47,44 +19,16 @@ apifuzzer_fuzz:
- if: $API_FUZZING_DISABLED_FOR_DEFAULT_BRANCH &&
$CI_DEFAULT_BRANCH == $CI_COMMIT_REF_NAME
when: never
- if: $CI_COMMIT_BRANCH && $GITLAB_FEATURES =~ /\bapi_fuzzing\b/
- if: $CI_COMMIT_BRANCH
script:
#
# Validate options
- |
if [ "$FUZZAPI_HAR$FUZZAPI_OPENAPI$FUZZAPI_POSTMAN_COLLECTION" == "" ]; then \
echo "Error: One of FUZZAPI_HAR, FUZZAPI_OPENAPI, or FUZZAPI_POSTMAN_COLLECTION must be provided."; \
echo "See https://docs.gitlab.com/ee/user/application_security/api_fuzzing/ for information on how to configure API Fuzzing."; \
exit 1; \
fi
#
# Run user provided pre-script
- sh -c "$FUZZAPI_PRE_SCRIPT"
#
# Make sure asset path exists
- mkdir -p $FUZZAPI_REPORT_ASSET_PATH
#
# Start API Security background process
- dotnet /peach/Peach.Web.dll &> $FUZZAPI_LOG_SCANNER &
- APISEC_PID=$!
#
# Start scanning
- worker-entry
#
# Run user provided post-script
- sh -c "$FUZZAPI_POST_SCRIPT"
#
# Shutdown API Security
- kill $APISEC_PID
- wait $APISEC_PID
#
- /peach/analyzer-fuzz-api
artifacts:
when: always
paths:
- $FUZZAPI_REPORT_ASSET_PATH
- $FUZZAPI_REPORT
- $FUZZAPI_LOG_SCANNER
- gl-assets
- gl-api-fuzzing-report.json
- gl-*.log
reports:
api_fuzzing: $FUZZAPI_REPORT
api_fuzzing: gl-api-fuzzing-report.json
# end

View File

@ -86,8 +86,8 @@
| `nodes` | `[item!]` | The items in the current page. |
The precise type of `Edge` and `Item` depends on the kind of connection. A
[`UserConnection`](#userconnection) will have nodes that have the type
[`[User!]`](#user), and edges that have the type [`UserEdge`](#useredge).
[`ProjectConnection`](#projectconnection) will have nodes that have the type
[`[Project!]`](#project), and edges that have the type [`ProjectEdge`](#projectedge).
### Connection types

View File

@ -13,7 +13,8 @@ module Gitlab
# inner Schema::Object#object. This depends on whether the field
# has a @resolver_proc or not.
if object.is_a?(::Types::BaseObject)
object.present(field.owner, attrs)
type = field.owner.kind.abstract? ? object.class : field.owner
object.present(type, attrs)
yield(object, arguments)
else
# This is the legacy code-path, hit if the field has a @resolver_proc

View File

@ -4047,11 +4047,6 @@ msgid_plural "ApprovalRuleRemove|%d members"
msgstr[0] ""
msgstr[1] ""
msgid "ApprovalRuleRemove|Approvals from this member are not revoked."
msgid_plural "ApprovalRuleRemove|Approvals from these members are not revoked."
msgstr[0] ""
msgstr[1] ""
msgid "ApprovalRuleRemove|Remove approval gate"
msgstr ""
@ -4061,8 +4056,10 @@ msgstr ""
msgid "ApprovalRuleRemove|You are about to remove the %{name} approval gate. Approval from this service is not revoked."
msgstr ""
msgid "ApprovalRuleRemove|You are about to remove the %{name} approver group which has %{nMembers}."
msgstr ""
msgid "ApprovalRuleRemove|You are about to remove the %{name} approver group which has %{strongStart}%{count} member%{strongEnd}. Approvals from this member are not revoked."
msgid_plural "ApprovalRuleRemove|You are about to remove the %{name} approver group which has %{strongStart}%{count} members%{strongEnd}. Approvals from these members are not revoked."
msgstr[0] ""
msgstr[1] ""
msgid "ApprovalRuleSummary|%d member"
msgid_plural "ApprovalRuleSummary|%d members"
@ -21041,9 +21038,6 @@ msgstr ""
msgid "More Information"
msgstr ""
msgid "More Information."
msgstr ""
msgid "More Slack commands"
msgstr ""
@ -36086,6 +36080,9 @@ msgstr ""
msgid "WikiPageConflictMessage|Someone edited the page the same time you did. Please check out %{wikiLinkStart}the page%{wikiLinkEnd} and make sure your changes will not unintentionally remove theirs."
msgstr ""
msgid "WikiPage|Cancel"
msgstr ""
msgid "WikiPage|Commit message"
msgstr ""
@ -36101,9 +36098,21 @@ msgstr ""
msgid "WikiPage|Format"
msgstr ""
msgid "WikiPage|More Information."
msgstr ""
msgid "WikiPage|Page title"
msgstr ""
msgid "WikiPage|Save changes"
msgstr ""
msgid "WikiPage|Switch to old editor"
msgstr ""
msgid "WikiPage|Switching to the old editor will discard any changes you've made in the new editor."
msgstr ""
msgid "WikiPage|Tip: You can move this page by adding the path to the beginning of the title."
msgstr ""
@ -36119,9 +36128,15 @@ msgstr ""
msgid "WikiPage|Update %{pageTitle}"
msgstr ""
msgid "WikiPage|Use new editor"
msgstr ""
msgid "WikiPage|Write your content or drag files here…"
msgstr ""
msgid "WikiPage|You are editing this page with Content Editor. This editor is in beta and may not display the page's contents properly."
msgstr ""
msgid "Wikis"
msgstr ""

View File

@ -95,6 +95,10 @@ FactoryBot.define do
severity { 'unknown' }
end
trait :threat_monitoring do
domain { :threat_monitoring }
end
trait :prometheus do
monitoring_tool { Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus] }
payload do

View File

@ -8,29 +8,28 @@
"packageUsername",
"packageChannel",
"recipe",
"recipePath",
"packageName"
"recipePath"
],
"properties": {
"id": {
"type": "string"
},
"created_at": {
"createdAt": {
"type": "string"
},
"updated_at": {
"updatedAt": {
"type": "string"
},
"package_username": {
"packageUsername": {
"type": "string"
},
"package_channel": {
"packageChannel": {
"type": "string"
},
"recipe": {
"type": "string"
},
"recipe_path": {
"recipePath": {
"type": "string"
}
}

View File

@ -1,36 +1,38 @@
import { shallowMount } from '@vue/test-utils';
import { mount } from '@vue/test-utils';
import { EditorContent } from 'tiptap';
import waitForPromises from 'helpers/wait_for_promises';
import ContentEditor from '~/content_editor/components/content_editor.vue';
import TopToolbar from '~/content_editor/components/top_toolbar.vue';
import createEditor from '~/content_editor/services/create_editor';
import createMarkdownSerializer from '~/content_editor/services/markdown_serializer';
describe('ContentEditor', () => {
let wrapper;
let editor;
const buildWrapper = async () => {
editor = await createEditor({ serializer: createMarkdownSerializer({ toHTML: () => '' }) });
wrapper = shallowMount(ContentEditor, {
const createWrapper = async (_editor) => {
wrapper = mount(ContentEditor, {
propsData: {
editor,
editor: _editor,
},
});
};
beforeEach(async () => {
editor = await createEditor({ renderMarkdown: () => 'sample text' });
createWrapper(editor);
await waitForPromises();
});
afterEach(() => {
wrapper.destroy();
});
it('renders editor content component and attaches editor instance', async () => {
await buildWrapper();
expect(wrapper.findComponent(EditorContent).props().editor).toBe(editor);
});
it('renders top toolbar component and attaches editor instance', async () => {
await buildWrapper();
expect(wrapper.findComponent(TopToolbar).props().editor).toBe(editor);
});
});

View File

@ -11,12 +11,12 @@ describe('content_editor/services/create_editor', () => {
deserialize: jest.fn(),
});
it('sets gl-py-4 gl-px-5 class selectors to editor attributes', async () => {
it('sets gl-outline-0! class selector to editor attributes', async () => {
const editor = await createEditor({ renderMarkdown });
expect(editor.options.editorProps).toMatchObject({
attributes: {
class: 'gl-py-4 gl-px-5',
class: 'gl-outline-0!',
},
});
});

View File

@ -1,6 +1,12 @@
import { GlAlert, GlButton, GlLoadingIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ContentEditor from '~/content_editor/components/content_editor.vue';
import WikiForm from '~/pages/shared/wikis/components/wiki_form.vue';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
describe('WikiForm', () => {
let wrapper;
@ -11,10 +17,26 @@ describe('WikiForm', () => {
const findContent = () => wrapper.find('#wiki_content');
const findMessage = () => wrapper.find('#wiki_message');
const findSubmitButton = () => wrapper.findByTestId('wiki-submit-button');
const findCancelButton = () => wrapper.findByTestId('wiki-cancel-button');
const findTitleHelpLink = () => wrapper.findByTestId('wiki-title-help-link');
const findCancelButton = () => wrapper.findByRole('link', { name: 'Cancel' });
const findUseNewEditorButton = () => wrapper.findByRole('button', { name: 'Use new editor' });
const findTitleHelpLink = () => wrapper.findByRole('link', { name: 'More Information.' });
const findMarkdownHelpLink = () => wrapper.findByTestId('wiki-markdown-help-link');
const setFormat = (value) => {
const format = findFormat();
format.find(`option[value=${value}]`).setSelected();
format.element.dispatchEvent(new Event('change'));
};
const triggerFormSubmit = () => findForm().element.dispatchEvent(new Event('submit'));
const dispatchBeforeUnload = () => {
const e = new Event('beforeunload');
jest.spyOn(e, 'preventDefault');
window.dispatchEvent(e);
return e;
};
const pageInfoNew = {
persisted: false,
uploadsPath: '/project/path/-/wikis/attachments',
@ -35,7 +57,10 @@ describe('WikiForm', () => {
path: '/project/path/-/wikis/home',
};
function createWrapper(persisted = false, pageInfo = {}) {
function createWrapper(
persisted = false,
{ pageInfo, glFeatures } = { glFeatures: { wikiContentEditor: false } },
) {
wrapper = extendedWrapper(
mount(
WikiForm,
@ -51,13 +76,12 @@ describe('WikiForm', () => {
...(persisted ? pageInfoPersisted : pageInfoNew),
...pageInfo,
},
glFeatures,
},
},
{ attachToDocument: true },
),
);
jest.spyOn(wrapper.vm, 'onBeforeUnload');
}
afterEach(() => {
@ -101,7 +125,7 @@ describe('WikiForm', () => {
`('updates the link help message when format=$value is selected', async ({ value, text }) => {
createWrapper();
findFormat().find(`option[value=${value}]`).setSelected();
setFormat(value);
await wrapper.vm.$nextTick();
@ -113,9 +137,9 @@ describe('WikiForm', () => {
await wrapper.vm.$nextTick();
window.dispatchEvent(new Event('beforeunload'));
expect(wrapper.vm.onBeforeUnload).not.toHaveBeenCalled();
const e = dispatchBeforeUnload();
expect(typeof e.returnValue).not.toBe('string');
expect(e.preventDefault).not.toHaveBeenCalled();
});
it.each`
@ -156,19 +180,18 @@ describe('WikiForm', () => {
});
it('sets before unload warning', () => {
window.dispatchEvent(new Event('beforeunload'));
const e = dispatchBeforeUnload();
expect(wrapper.vm.onBeforeUnload).toHaveBeenCalled();
expect(e.preventDefault).toHaveBeenCalledTimes(1);
});
it('when form submitted, unsets before unload warning', async () => {
findForm().element.dispatchEvent(new Event('submit'));
triggerFormSubmit();
await wrapper.vm.$nextTick();
window.dispatchEvent(new Event('beforeunload'));
expect(wrapper.vm.onBeforeUnload).not.toHaveBeenCalled();
const e = dispatchBeforeUnload();
expect(e.preventDefault).not.toHaveBeenCalled();
});
});
@ -219,4 +242,161 @@ describe('WikiForm', () => {
},
);
});
describe('when feature flag wikiContentEditor is enabled', () => {
beforeEach(() => {
createWrapper(true, { glFeatures: { wikiContentEditor: true } });
});
it.each`
format | buttonExists
${'markdown'} | ${true}
${'rdoc'} | ${false}
`(
'switch to new editor button exists: $buttonExists if format is $format',
async ({ format, buttonExists }) => {
setFormat(format);
await wrapper.vm.$nextTick();
expect(findUseNewEditorButton().exists()).toBe(buttonExists);
},
);
const assertOldEditorIsVisible = () => {
expect(wrapper.findComponent(ContentEditor).exists()).toBe(false);
expect(wrapper.findComponent(MarkdownField).exists()).toBe(true);
expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
};
it('shows old editor by default', assertOldEditorIsVisible);
describe('switch format to rdoc', () => {
beforeEach(async () => {
setFormat('rdoc');
await wrapper.vm.$nextTick();
});
it('continues to show the old editor', assertOldEditorIsVisible);
describe('switch format back to markdown', () => {
beforeEach(async () => {
setFormat('rdoc');
await wrapper.vm.$nextTick();
});
it(
'still shows the old editor and does not automatically switch to the content editor ',
assertOldEditorIsVisible,
);
});
});
describe('clicking "use new editor"', () => {
let mock;
beforeEach(async () => {
mock = new MockAdapter(axios);
mock.onPost(/preview-markdown/).reply(200, { body: '<p>hello <strong>world</strong></p>' });
findUseNewEditorButton().trigger('click');
await wrapper.vm.$nextTick();
});
afterEach(() => {
mock.restore();
});
it('shows a loading indicator for the rich text editor', () => {
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('shows a warning alert that the rich text editor is in beta', () => {
expect(wrapper.findComponent(GlAlert).text()).toContain(
"You are editing this page with Content Editor. This editor is in beta and may not display the page's contents properly.",
);
});
it('shows the rich text editor when loading finishes', async () => {
// wait for content editor to load
await waitForPromises();
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.findComponent(ContentEditor).exists()).toBe(true);
});
it('disables the format dropdown', () => {
expect(findFormat().element.getAttribute('disabled')).toBeDefined();
});
describe('when wiki content is updated', () => {
beforeEach(async () => {
// wait for content editor to load
await waitForPromises();
wrapper.vm.editor.setContent('<p>hello __world__ from content editor</p>', true);
await waitForPromises();
return wrapper.vm.$nextTick();
});
it('sets before unload warning', () => {
const e = dispatchBeforeUnload();
expect(e.preventDefault).toHaveBeenCalledTimes(1);
});
it('unsets before unload warning on form submit', async () => {
triggerFormSubmit();
await wrapper.vm.$nextTick();
const e = dispatchBeforeUnload();
expect(e.preventDefault).not.toHaveBeenCalled();
});
});
it('updates content from content editor on form submit', async () => {
// old value
expect(findContent().element.value).toBe('My page content');
// wait for content editor to load
await waitForPromises();
triggerFormSubmit();
await wrapper.vm.$nextTick();
expect(findContent().element.value).toBe('hello **world**');
});
describe('clicking "switch to old editor"', () => {
beforeEach(async () => {
// wait for content editor to load
await waitForPromises();
wrapper.vm.editor.setContent('<p>hello __world__ from content editor</p>', true);
wrapper.findComponent(GlAlert).findComponent(GlButton).trigger('click');
await wrapper.vm.$nextTick();
});
it('switches to old editor', () => {
expect(wrapper.findComponent(ContentEditor).exists()).toBe(false);
expect(wrapper.findComponent(MarkdownField).exists()).toBe(true);
});
it('does not show a warning alert about content editor', () => {
expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
});
it('the old editor retains its old value and does not use the content from the content editor', () => {
expect(findContent().element.value).toBe('My page content');
});
});
});
});
});

View File

@ -8,11 +8,46 @@ RSpec.describe Resolvers::GroupPackagesResolver do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, group: group) }
let_it_be(:package) { create(:package, project: project) }
let(:args) do
{ sort: :created_desc }
end
describe '#resolve' do
subject(:packages) { resolve(described_class, ctx: { current_user: user }, obj: group) }
subject { resolve(described_class, ctx: { current_user: user }, obj: group, args: args).to_a }
it { is_expected.to contain_exactly(package) }
context 'without sort' do
let_it_be(:package) { create(:package, project: project) }
it { is_expected.to contain_exactly(package) }
end
context 'with a sort argument' do
let_it_be(:project2) { create(:project, :public, group: group) }
let_it_be(:sort_repository) do
create(:conan_package, name: 'bar', project: project, created_at: 1.day.ago, version: "1.0.0")
end
let_it_be(:sort_repository2) do
create(:maven_package, name: 'foo', project: project2, created_at: 1.hour.ago, version: "2.0.0")
end
[:created_desc, :name_desc, :version_desc, :type_asc, :project_path_desc].each do |order|
context "#{order}" do
let(:args) { { sort: order } }
it { is_expected.to eq([sort_repository2, sort_repository]) }
end
end
[:created_asc, :name_asc, :version_asc, :type_desc, :project_path_asc].each do |order|
context "#{order}" do
let(:args) { { sort: order } }
it { is_expected.to eq([sort_repository, sort_repository2]) }
end
end
end
end
end

View File

@ -7,11 +7,44 @@ RSpec.describe Resolvers::ProjectPackagesResolver do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:package) { create(:package, project: project) }
let(:args) do
{ sort: :created_desc }
end
describe '#resolve' do
subject(:packages) { resolve(described_class, ctx: { current_user: user }, obj: project) }
subject { resolve(described_class, ctx: { current_user: user }, obj: project, args: args).to_a }
it { is_expected.to contain_exactly(package) }
context 'without sort' do
let_it_be(:package) { create(:package, project: project) }
it { is_expected.to contain_exactly(package) }
end
context 'with a sort argument' do
let_it_be(:sort_repository) do
create(:conan_package, name: 'bar', project: project, created_at: 1.day.ago, version: "1.0.0")
end
let_it_be(:sort_repository2) do
create(:maven_package, name: 'foo', project: project, created_at: 1.hour.ago, version: "2.0.0")
end
[:created_desc, :name_desc, :version_desc, :type_asc].each do |order|
context "#{order}" do
let(:args) { { sort: order } }
it { is_expected.to eq([sort_repository2, sort_repository]) }
end
end
[:created_asc, :name_asc, :version_asc, :type_desc].each do |order|
context "#{order}" do
let(:args) { { sort: order } }
it { is_expected.to eq([sort_repository, sort_repository2]) }
end
end
end
end
end

View File

@ -21,6 +21,7 @@ RSpec.describe GitlabSchema.types['Query'] do
user
users
issue
merge_request
usage_trends_measurements
runner_platforms
]
@ -60,11 +61,21 @@ RSpec.describe GitlabSchema.types['Query'] do
describe 'issue field' do
subject { described_class.fields['issue'] }
it 'returns issue' do
it "finds an issue by it's gid" do
is_expected.to have_graphql_arguments(:id)
is_expected.to have_graphql_type(Types::IssueType)
end
end
describe 'merge_request field' do
subject { described_class.fields['mergeRequest'] }
it "finds a merge_request by it's gid" do
is_expected.to have_graphql_arguments(:id)
is_expected.to have_graphql_type(Types::MergeRequestType)
end
end
describe 'usage_trends_measurements field' do
subject { described_class.fields['usageTrendsMeasurements'] }

View File

@ -5,7 +5,11 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['User'] do
specify { expect(described_class.graphql_name).to eq('User') }
specify { expect(described_class).to require_graphql_authorizations(:read_user) }
specify do
runtime_type = described_class.resolve_type(build(:user), {})
expect(runtime_type).to require_graphql_authorizations(:read_user)
end
it 'has the expected fields' do
expected_fields = %w[

View File

@ -33,6 +33,38 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
end
end
context 'when the field is declared on an interface, and implemented by a presenter' do
let(:interface) do
Module.new do
include ::Types::BaseInterface
field :interface_field, GraphQL::STRING_TYPE, null: true
end
end
let(:implementation) do
type = fresh_object_type('Concrete')
type.present_using(concrete_impl)
type.implements(interface)
type
end
def concrete_impl
Class.new(base_presenter) do
def interface_field
'made of concrete'
end
end
end
it 'resolves the interface field using the implementation from the presenter' do
field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
value = resolve_field(field, object, object_type: implementation)
expect(value).to eq 'made of concrete'
end
end
describe 'interactions with inheritance' do
def parent
type = fresh_object_type('Parent')

View File

@ -12,11 +12,11 @@ RSpec.describe 'getting a package list for a group' do
let_it_be(:group_two_project) { create(:project, :repository, group: group_two) }
let_it_be(:current_user) { create(:user) }
let_it_be(:package) { create(:package, project: project) }
let_it_be(:npm_package) { create(:npm_package, project: group_two_project) }
let_it_be(:maven_package) { create(:maven_package, project: project) }
let_it_be(:debian_package) { create(:debian_package, project: another_project) }
let_it_be(:composer_package) { create(:composer_package, project: another_project) }
let_it_be(:maven_package) { create(:maven_package, project: project, name: 'tab', version: '4.0.0', created_at: 5.days.ago) }
let_it_be(:package) { create(:npm_package, project: project, name: 'uab', version: '5.0.0', created_at: 4.days.ago) }
let_it_be(:composer_package) { create(:composer_package, project: another_project, name: 'vab', version: '6.0.0', created_at: 3.days.ago) }
let_it_be(:debian_package) { create(:debian_package, project: another_project, name: 'zab', version: '7.0.0', created_at: 2.days.ago) }
let_it_be(:composer_metadatum) do
create(:composer_metadatum, package: composer_package,
target_sha: 'afdeh',
@ -46,6 +46,42 @@ RSpec.describe 'getting a package list for a group' do
it_behaves_like 'group and project packages query'
describe 'sorting and pagination' do
let_it_be(:ascending_packages) { [maven_package, package, composer_package, debian_package].map { |package| global_id_of(package)} }
let(:data_path) { [:group, :packages] }
before do
resource.add_reporter(current_user)
end
[:CREATED_ASC, :NAME_ASC, :VERSION_ASC, :TYPE_ASC].each do |order|
context "#{order}" do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { order }
let(:first_param) { 4 }
let(:expected_results) { ascending_packages }
end
end
end
[:CREATED_DESC, :NAME_DESC, :VERSION_DESC, :TYPE_DESC].each do |order|
context "#{order}" do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { order }
let(:first_param) { 4 }
let(:expected_results) { ascending_packages.reverse }
end
end
end
def pagination_query(params)
graphql_query_for(:group, { 'fullPath' => resource.full_path },
query_nodes(:packages, :id, include_pagination_info: true, args: params)
)
end
end
context 'with a batched query' do
let(:batch_query) do
<<~QUERY

View File

@ -76,7 +76,7 @@ RSpec.describe 'Query.issue(id)' do
post_graphql(query, current_user: current_user)
end
it "returns the Issue and field #{params['field']}" do
it "returns the issue and field #{params['field']}" do
expect(issue_data.keys).to eq([field])
end
end
@ -86,7 +86,7 @@ RSpec.describe 'Query.issue(id)' do
context 'when selecting multiple fields' do
let(:issue_fields) { ['title', 'description', 'updatedBy { username }'] }
it 'returns the Issue with the specified fields' do
it 'returns the issue with the specified fields' do
post_graphql(query, current_user: current_user)
expect(issue_data.keys).to eq %w[title description updatedBy]
@ -115,7 +115,7 @@ RSpec.describe 'Query.issue(id)' do
end
end
context 'when passed a non-Issue gid' do
context 'when passed a non-issue gid' do
let(:mr) { create(:merge_request) }
it 'returns an error' do

View File

@ -0,0 +1,111 @@
# frozen_string_literal: true
require 'spec_helper'
# Request spec for the top-level GraphQL `mergeRequest(id:)` query.
# Verifies authorization (nil for anonymous users), field selection, and
# GID type-checking behaviour.
RSpec.describe 'Query.merge_request(id)' do
  include GraphqlHelpers

  let_it_be(:project) { create(:project, :empty_repo) }
  let_it_be(:merge_request) { create(:merge_request, source_project: project) }
  let_it_be(:current_user) { create(:user) }
  # Query argument: the MR's global ID string (e.g. "gid://gitlab/MergeRequest/1").
  let_it_be(:merge_request_params) { { 'id' => merge_request.to_global_id.to_s } }

  let(:merge_request_data) { graphql_data['mergeRequest'] }
  # Default selection: every scalar field on the MergeRequest type.
  let(:merge_request_fields) { all_graphql_fields_for('MergeRequest'.classify) }

  let(:query) do
    graphql_query_for('mergeRequest', merge_request_params, merge_request_fields)
  end

  it_behaves_like 'a working graphql query' do
    before do
      post_graphql(query, current_user: current_user)
    end
  end

  # MRs are noteable: exercise the shared notes/discussions query contract.
  it_behaves_like 'a noteable graphql type we can query' do
    let(:noteable) { merge_request }
    let(:project) { merge_request.project }
    let(:path_to_noteable) { [:merge_request] }

    before do
      project.add_reporter(current_user)
    end

    def query(fields)
      graphql_query_for('mergeRequest', merge_request_params, fields)
    end
  end

  context 'when the user does not have access to the merge request' do
    it 'returns nil' do
      # No current_user: anonymous request against a non-public MR.
      post_graphql(query)

      expect(merge_request_data).to be nil
    end
  end

  context 'when the user does have access' do
    before do
      project.add_reporter(current_user)
    end

    it 'returns the merge request' do
      post_graphql(query, current_user: current_user)

      expect(merge_request_data).to include(
        'title' => merge_request.title,
        'description' => merge_request.description
      )
    end

    context 'when selecting any single field' do
      # Parameterized (rspec-parameterized): one example per scalar field,
      # ensuring each field can be queried on its own.
      where(:field) do
        scalar_fields_of('MergeRequest').map { |name| [name] }
      end

      with_them do
        it_behaves_like 'a working graphql query' do
          let(:merge_request_fields) do
            field
          end

          before do
            post_graphql(query, current_user: current_user)
          end

          it "returns the merge request and field #{params['field']}" do
            expect(merge_request_data.keys).to eq([field])
          end
        end
      end
    end

    context 'when selecting multiple fields' do
      let(:merge_request_fields) { ['title', 'description', 'author { username }'] }

      it 'returns the merge request with the specified fields' do
        post_graphql(query, current_user: current_user)

        expect(merge_request_data.keys).to eq %w[title description author]
        expect(merge_request_data['title']).to eq(merge_request.title)
        expect(merge_request_data['description']).to eq(merge_request.description)
        expect(merge_request_data['author']['username']).to eq(merge_request.author.username)
      end
    end

    context 'when passed a non-merge request gid' do
      let(:issue) { create(:issue) }

      it 'returns an error' do
        # An Issue GID must be rejected by the MergeRequest type coercion.
        gid = issue.to_global_id.to_s
        merge_request_params['id'] = gid

        post_graphql(query, current_user: current_user)

        expect(graphql_errors).not_to be nil
        expect(graphql_errors.first['message']).to eq("\"#{gid}\" does not represent an instance of MergeRequest")
      end
    end
  end
end

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true
require 'spec_helper'
# Request spec for the GraphQL `package(id:)` query against a Composer
# package: schema conformance, Composer-specific metadata, and that a
# Composer package exposes no package files.
RSpec.describe 'package details' do
  include GraphqlHelpers

  let_it_be(:project) { create(:project) }
  let_it_be(:composer_package) { create(:composer_package, project: project) }
  let_it_be(:composer_json) { { name: 'name', type: 'type', license: 'license', version: 1 } }
  let_it_be(:composer_metadatum) do
    # we are forced to manually create the metadatum, without using the factory to force the sha to be a string
    # and avoid an error where gitaly can't find the repository
    create(:composer_metadatum, package: composer_package, target_sha: 'foo_sha', composer_json: composer_json)
  end

  # Query-shape knobs consumed by all_graphql_fields_for below.
  let(:depth) { 3 }
  let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
  let(:metadata) { query_graphql_fragment('ComposerMetadata') }
  let(:package_files) { all_graphql_fields_for('PackageFile') }
  let(:user) { project.owner }
  let(:package_global_id) { global_id_of(composer_package) }

  # Response accessors.
  let(:package_details) { graphql_data_at(:package) }
  let(:metadata_response) { graphql_data_at(:package, :metadata) }
  let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }

  let(:query) do
    # metadata and packageFiles are excluded above and re-added here with
    # explicit selections so their fragments can be controlled per example.
    graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
      #{all_graphql_fields_for('PackageDetailsType', max_depth: depth, excluded: excluded)}
      metadata {
        #{metadata}
      }
      packageFiles {
        nodes {
          #{package_files}
        }
      }
    FIELDS
  end

  subject { post_graphql(query, current_user: user) }

  before do
    subject
  end

  it_behaves_like 'a working graphql query' do
    it 'matches the JSON schema' do
      expect(package_details).to match_schema('graphql/packages/package_details')
    end
  end

  describe 'Composer' do
    it 'has the correct metadata' do
      expect(metadata_response).to include(
        'targetSha' => 'foo_sha',
        'composerJson' => composer_json.transform_keys(&:to_s).transform_values(&:to_s)
      )
    end

    it 'does not have files' do
      # Composer packages are served from the project repo, not stored files.
      expect(package_files_response).to be_empty
    end
  end
end

View File

@ -0,0 +1,90 @@
# frozen_string_literal: true
require 'spec_helper'
# Request spec for the GraphQL `package(id:)` query against a Conan package:
# schema conformance, Conan recipe metadata, and per-file Conan metadata.
RSpec.describe 'conan package details' do
  include GraphqlHelpers

  let_it_be(:project) { create(:project) }
  let_it_be(:conan_package) { create(:conan_package, project: project) }

  let(:package_global_id) { global_id_of(conan_package) }
  let(:metadata) { query_graphql_fragment('ConanMetadata') }
  # The file whose global ID matches the first node returned by the query,
  # so per-file assertions compare against the right record.
  let(:first_file) { conan_package.package_files.find { |f| global_id_of(f) == first_file_response['id'] } }

  # Query-shape knobs consumed by all_graphql_fields_for below.
  let(:depth) { 3 }
  let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
  let(:package_files) { all_graphql_fields_for('PackageFile') }
  let(:package_files_metadata) { query_graphql_fragment('ConanFileMetadata') }
  let(:user) { project.owner }

  # Response accessors.
  let(:package_details) { graphql_data_at(:package) }
  let(:metadata_response) { graphql_data_at(:package, :metadata) }
  let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }
  let(:first_file_response) { graphql_data_at(:package, :package_files, :nodes, 0) }
  let(:first_file_response_metadata) { graphql_data_at(:package, :package_files, :nodes, 0, :file_metadata) }

  let(:query) do
    # metadata and packageFiles are excluded above and re-added here with
    # explicit Conan fragments.
    graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
      #{all_graphql_fields_for('PackageDetailsType', max_depth: depth, excluded: excluded)}
      metadata {
        #{metadata}
      }
      packageFiles {
        nodes {
          #{package_files}
          fileMetadata {
            #{package_files_metadata}
          }
        }
      }
    FIELDS
  end

  subject { post_graphql(query, current_user: user) }

  before do
    subject
  end

  it_behaves_like 'a working graphql query' do
    it 'matches the JSON schema' do
      expect(package_details).to match_schema('graphql/packages/package_details')
    end
  end

  it 'has the correct metadata' do
    expect(metadata_response).to include(
      'id' => global_id_of(conan_package.conan_metadatum),
      'recipe' => conan_package.conan_metadatum.recipe,
      'packageChannel' => conan_package.conan_metadatum.package_channel,
      'packageUsername' => conan_package.conan_metadatum.package_username,
      'recipePath' => conan_package.conan_metadatum.recipe_path
    )
  end

  it 'has the right amount of files' do
    expect(package_files_response.length).to be(conan_package.package_files.length)
  end

  it 'has the basic package files data' do
    expect(first_file_response).to include(
      'id' => global_id_of(first_file),
      'fileName' => first_file.file_name,
      # size is serialized as a string by the GraphQL BigInt type.
      'size' => first_file.size.to_s,
      'downloadPath' => first_file.download_path,
      'fileSha1' => first_file.file_sha1,
      'fileMd5' => first_file.file_md5,
      'fileSha256' => first_file.file_sha256
    )
  end

  it 'has the correct file metadata' do
    expect(first_file_response_metadata).to include(
      'id' => global_id_of(first_file.conan_file_metadatum),
      'packageRevision' => first_file.conan_file_metadatum.package_revision,
      'conanPackageReference' => first_file.conan_file_metadatum.conan_package_reference,
      'recipeRevision' => first_file.conan_file_metadatum.recipe_revision,
      # GraphQL enum values are upcased relative to the model's enum.
      'conanFileType' => first_file.conan_file_metadatum.conan_file_type.upcase
    )
  end
end

View File

@ -17,7 +17,9 @@ RSpec.describe 'package details' do
let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
let(:metadata) { query_graphql_fragment('ComposerMetadata') }
let(:package_files) {all_graphql_fields_for('PackageFile')}
let(:package_files_metadata) {query_graphql_fragment('ConanFileMetadata')}
let(:user) { project.owner }
let(:package_global_id) { global_id_of(composer_package) }
let(:package_details) { graphql_data_at(:package) }
let(:query) do
graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
@ -28,22 +30,11 @@ RSpec.describe 'package details' do
packageFiles {
nodes {
#{package_files}
fileMetadata {
#{package_files_metadata}
}
}
}
FIELDS
end
let(:user) { project.owner }
let(:package_global_id) { global_id_of(composer_package) }
let(:package_details) { graphql_data_at(:package) }
let(:metadata_response) { graphql_data_at(:package, :metadata) }
let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }
let(:first_file_response) { graphql_data_at(:package, :package_files, :nodes, 0)}
let(:first_file_response_metadata) { graphql_data_at(:package, :package_files, :nodes, 0, :file_metadata)}
subject { post_graphql(query, current_user: user) }
it_behaves_like 'a working graphql query' do
@ -56,69 +47,6 @@ RSpec.describe 'package details' do
end
end
describe 'Packages Metadata' do
before do
subject
end
describe 'Composer' do
it 'has the correct metadata' do
expect(metadata_response).to include(
'targetSha' => 'foo_sha',
'composerJson' => composer_json.transform_keys(&:to_s).transform_values(&:to_s)
)
end
it 'does not have files' do
expect(package_files_response).to be_empty
end
end
describe 'Conan' do
let_it_be(:conan_package) { create(:conan_package, project: project) }
let(:package_global_id) { global_id_of(conan_package) }
let(:metadata) { query_graphql_fragment('ConanMetadata') }
let(:first_file) { conan_package.package_files.find { |f| global_id_of(f) == first_file_response['id'] } }
it 'has the correct metadata' do
expect(metadata_response).to include(
'id' => global_id_of(conan_package.conan_metadatum),
'recipe' => conan_package.conan_metadatum.recipe,
'packageChannel' => conan_package.conan_metadatum.package_channel,
'packageUsername' => conan_package.conan_metadatum.package_username,
'recipePath' => conan_package.conan_metadatum.recipe_path
)
end
it 'has the right amount of files' do
expect(package_files_response.length).to be(conan_package.package_files.length)
end
it 'has the basic package files data' do
expect(first_file_response).to include(
'id' => global_id_of(first_file),
'fileName' => first_file.file_name,
'size' => first_file.size.to_s,
'downloadPath' => first_file.download_path,
'fileSha1' => first_file.file_sha1,
'fileMd5' => first_file.file_md5,
'fileSha256' => first_file.file_sha256
)
end
it 'has the correct file metadata' do
expect(first_file_response_metadata).to include(
'id' => global_id_of(first_file.conan_file_metadatum),
'packageRevision' => first_file.conan_file_metadatum.package_revision,
'conanPackageReference' => first_file.conan_file_metadatum.conan_package_reference,
'recipeRevision' => first_file.conan_file_metadatum.recipe_revision,
'conanFileType' => first_file.conan_file_metadatum.conan_file_type.upcase
)
end
end
end
context 'there are other versions of this package' do
let(:depth) { 3 }
let(:excluded) { %w[metadata project tags pipelines] } # to limit the query complexity

View File

@ -311,23 +311,23 @@ RSpec.describe 'getting merge request information nested in a project' do
end
end
context 'when requesting information about MR interactions' do
shared_examples 'when requesting information about MR interactions' do
let_it_be(:user) { create(:user) }
let(:selected_fields) { all_graphql_fields_for('UserMergeRequestInteraction') }
let(:mr_fields) do
query_nodes(
:reviewers,
field,
query_graphql_field(:merge_request_interaction, nil, selected_fields)
)
end
def interaction_data
graphql_data_at(:project, :merge_request, :reviewers, :nodes, :merge_request_interaction)
graphql_data_at(:project, :merge_request, field, :nodes, :merge_request_interaction)
end
context 'when the user does not have interactions' do
context 'when the user is not assigned' do
it 'returns null data' do
post_graphql(query)
@ -338,7 +338,7 @@ RSpec.describe 'getting merge request information nested in a project' do
context 'when the user is a reviewer, but has not reviewed' do
before do
project.add_guest(user)
merge_request.merge_request_reviewers.create!(reviewer: user)
assign_user(user)
end
it 'returns falsey values' do
@ -346,8 +346,8 @@ RSpec.describe 'getting merge request information nested in a project' do
expect(interaction_data).to contain_exactly a_hash_including(
'canMerge' => false,
'canUpdate' => false,
'reviewState' => 'UNREVIEWED',
'canUpdate' => can_update,
'reviewState' => unreviewed,
'reviewed' => false,
'approved' => false
)
@ -357,7 +357,9 @@ RSpec.describe 'getting merge request information nested in a project' do
context 'when the user has interacted' do
before do
project.add_maintainer(user)
merge_request.merge_request_reviewers.create!(reviewer: user, state: 'reviewed')
assign_user(user)
r = merge_request.merge_request_reviewers.find_or_create_by!(reviewer: user)
r.update!(state: 'reviewed')
merge_request.approved_by_users << user
end
@ -392,7 +394,10 @@ RSpec.describe 'getting merge request information nested in a project' do
end
it 'does not suffer from N+1' do
merge_request.merge_request_reviewers.create!(reviewer: user, state: 'reviewed')
assign_user(user)
merge_request.merge_request_reviewers
.find_or_create_by!(reviewer: user)
.update!(state: 'reviewed')
baseline = ActiveRecord::QueryRecorder.new do
post_graphql(query)
@ -401,7 +406,8 @@ RSpec.describe 'getting merge request information nested in a project' do
expect(interaction_data).to contain_exactly(include(reviewed))
other_users.each do |user|
merge_request.merge_request_reviewers.create!(reviewer: user)
assign_user(user)
merge_request.merge_request_reviewers.find_or_create_by!(reviewer: user)
end
expect { post_graphql(query) }.not_to exceed_query_limit(baseline)
@ -435,4 +441,24 @@ RSpec.describe 'getting merge request information nested in a project' do
end
end
end
it_behaves_like 'when requesting information about MR interactions' do
let(:field) { :reviewers }
let(:unreviewed) { 'UNREVIEWED' }
let(:can_update) { false }
def assign_user(user)
merge_request.merge_request_reviewers.create!(reviewer: user)
end
end
it_behaves_like 'when requesting information about MR interactions' do
let(:field) { :assignees }
let(:unreviewed) { nil }
let(:can_update) { true } # assignees can update MRs
def assign_user(user)
merge_request.assignees << user
end
end
end

View File

@ -7,11 +7,10 @@ RSpec.describe 'getting a package list for a project' do
let_it_be(:resource) { create(:project, :repository) }
let_it_be(:current_user) { create(:user) }
let_it_be(:package) { create(:package, project: resource) }
let_it_be(:maven_package) { create(:maven_package, project: resource) }
let_it_be(:debian_package) { create(:debian_package, project: resource) }
let_it_be(:composer_package) { create(:composer_package, project: resource) }
let_it_be(:maven_package) { create(:maven_package, project: resource, name: 'tab', version: '4.0.0', created_at: 5.days.ago) }
let_it_be(:package) { create(:npm_package, project: resource, name: 'uab', version: '5.0.0', created_at: 4.days.ago) }
let_it_be(:composer_package) { create(:composer_package, project: resource, name: 'vab', version: '6.0.0', created_at: 3.days.ago) }
let_it_be(:debian_package) { create(:debian_package, project: resource, name: 'zab', version: '7.0.0', created_at: 2.days.ago) }
let_it_be(:composer_metadatum) do
create(:composer_metadatum, package: composer_package,
target_sha: 'afdeh',
@ -40,4 +39,40 @@ RSpec.describe 'getting a package list for a project' do
end
it_behaves_like 'group and project packages query'
describe 'sorting and pagination' do
let_it_be(:ascending_packages) { [maven_package, package, composer_package, debian_package].map { |package| global_id_of(package)} }
let(:data_path) { [:project, :packages] }
before do
resource.add_reporter(current_user)
end
[:CREATED_ASC, :NAME_ASC, :VERSION_ASC, :TYPE_ASC].each do |order|
context "#{order}" do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { order }
let(:first_param) { 4 }
let(:expected_results) { ascending_packages }
end
end
end
[:CREATED_DESC, :NAME_DESC, :VERSION_DESC, :TYPE_DESC].each do |order|
context "#{order}" do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { order }
let(:first_param) { 4 }
let(:expected_results) { ascending_packages.reverse }
end
end
end
def pagination_query(params)
graphql_query_for(:project, { 'fullPath' => resource.full_path },
query_nodes(:packages, :id, include_pagination_info: true, args: params)
)
end
end
end

Some files were not shown because too many files have changed in this diff Show More