Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
c4acd4624d
commit
2b2299ea5f
|
|
@ -11,3 +11,6 @@ include:
|
|||
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
|
||||
inputs:
|
||||
gem_name: "click_house-client"
|
||||
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
|
||||
inputs:
|
||||
gem_name: "gitlab-schema-validation"
|
||||
|
|
|
|||
2
Gemfile
2
Gemfile
|
|
@ -348,6 +348,8 @@ gem 'sentry-sidekiq', '~> 5.8.0'
|
|||
#
|
||||
gem 'pg_query', '~> 4.2.1'
|
||||
|
||||
gem 'gitlab-schema-validation', path: 'gems/gitlab-schema-validation'
|
||||
|
||||
gem 'premailer-rails', '~> 1.10.3'
|
||||
|
||||
gem 'gitlab-labkit', '~> 0.33.0'
|
||||
|
|
|
|||
|
|
@ -18,6 +18,11 @@ PATH
|
|||
gitlab-rspec (0.1.0)
|
||||
rspec (~> 3.0)
|
||||
|
||||
PATH
|
||||
remote: gems/gitlab-schema-validation
|
||||
specs:
|
||||
gitlab-schema-validation (0.1.0)
|
||||
|
||||
PATH
|
||||
remote: gems/gitlab-utils
|
||||
specs:
|
||||
|
|
@ -1805,6 +1810,7 @@ DEPENDENCIES
|
|||
gitlab-markup (~> 1.9.0)
|
||||
gitlab-net-dns (~> 0.9.2)
|
||||
gitlab-rspec!
|
||||
gitlab-schema-validation!
|
||||
gitlab-sidekiq-fetcher!
|
||||
gitlab-styles (~> 10.1.0)
|
||||
gitlab-utils!
|
||||
|
|
|
|||
|
|
@ -78,6 +78,7 @@ export default {
|
|||
title: TOKEN_TITLE_AUTHOR,
|
||||
type: TOKEN_TYPE_AUTHOR,
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
initialUsers: this.authorsData,
|
||||
unique: true,
|
||||
operators: OPERATORS_IS,
|
||||
|
|
@ -88,6 +89,7 @@ export default {
|
|||
title: TOKEN_TITLE_ASSIGNEE,
|
||||
type: TOKEN_TYPE_ASSIGNEE,
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
initialUsers: this.assigneesData,
|
||||
unique: false,
|
||||
operators: OPERATORS_IS,
|
||||
|
|
|
|||
|
|
@ -75,6 +75,7 @@ export default {
|
|||
type: TOKEN_TYPE_ASSIGNEE,
|
||||
operators: OPERATORS_IS_NOT,
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
unique: true,
|
||||
fetchUsers,
|
||||
preloadedUsers: this.preloadedUsers(),
|
||||
|
|
@ -86,6 +87,7 @@ export default {
|
|||
operators: OPERATORS_IS_NOT,
|
||||
symbol: '@',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
unique: true,
|
||||
fetchUsers,
|
||||
preloadedUsers: this.preloadedUsers(),
|
||||
|
|
|
|||
|
|
@ -0,0 +1,110 @@
|
|||
<script>
|
||||
import { GlSprintf, GlLink } from '@gitlab/ui';
|
||||
import { s__ } from '~/locale';
|
||||
import { PUSH_EVENT_REF_TYPE_BRANCH, PUSH_EVENT_REF_TYPE_TAG } from '../../constants';
|
||||
import ResourceParentLink from '../resource_parent_link.vue';
|
||||
import ContributionEventBase from './contribution_event_base.vue';
|
||||
|
||||
export default {
|
||||
name: 'ContributionEventPushed',
|
||||
i18n: {
|
||||
new: {
|
||||
[PUSH_EVENT_REF_TYPE_BRANCH]: s__(
|
||||
'ContributionEvent|Pushed a new branch %{refLink} in %{resourceParentLink}.',
|
||||
),
|
||||
[PUSH_EVENT_REF_TYPE_TAG]: s__(
|
||||
'ContributionEvent|Pushed a new tag %{refLink} in %{resourceParentLink}.',
|
||||
),
|
||||
},
|
||||
removed: {
|
||||
[PUSH_EVENT_REF_TYPE_BRANCH]: s__(
|
||||
'ContributionEvent|Deleted branch %{refLink} in %{resourceParentLink}.',
|
||||
),
|
||||
[PUSH_EVENT_REF_TYPE_TAG]: s__(
|
||||
'ContributionEvent|Deleted tag %{refLink} in %{resourceParentLink}.',
|
||||
),
|
||||
},
|
||||
pushed: {
|
||||
[PUSH_EVENT_REF_TYPE_BRANCH]: s__(
|
||||
'ContributionEvent|Pushed to branch %{refLink} in %{resourceParentLink}.',
|
||||
),
|
||||
[PUSH_EVENT_REF_TYPE_TAG]: s__(
|
||||
'ContributionEvent|Pushed to tag %{refLink} in %{resourceParentLink}.',
|
||||
),
|
||||
},
|
||||
multipleCommits: s__(
|
||||
'ContributionEvent|…and %{count} more commits. %{linkStart}Compare%{linkEnd}.',
|
||||
),
|
||||
},
|
||||
components: { ContributionEventBase, GlSprintf, GlLink, ResourceParentLink },
|
||||
props: {
|
||||
event: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
ref() {
|
||||
return this.event.ref;
|
||||
},
|
||||
commit() {
|
||||
return this.event.commit;
|
||||
},
|
||||
message() {
|
||||
if (this.ref.is_new) {
|
||||
return this.$options.i18n.new[this.ref.type];
|
||||
} else if (this.ref.is_removed) {
|
||||
return this.$options.i18n.removed[this.ref.type];
|
||||
}
|
||||
|
||||
return this.$options.i18n.pushed[this.ref.type];
|
||||
},
|
||||
iconName() {
|
||||
if (this.ref.is_removed) {
|
||||
return 'remove';
|
||||
}
|
||||
|
||||
return 'commit';
|
||||
},
|
||||
hasMultipleCommits() {
|
||||
return this.commit.count > 1;
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<contribution-event-base :event="event" :icon-name="iconName">
|
||||
<gl-sprintf :message="message">
|
||||
<template #refLink>
|
||||
<gl-link v-if="ref.path" :href="ref.path" class="gl-font-monospace">{{ ref.name }}</gl-link>
|
||||
<span v-else class="gl-font-monospace">{{ ref.name }}</span>
|
||||
</template>
|
||||
<template #resourceParentLink>
|
||||
<resource-parent-link :event="event" />
|
||||
</template>
|
||||
</gl-sprintf>
|
||||
<template v-if="!ref.is_removed" #additional-info>
|
||||
<div>
|
||||
<gl-link :href="commit.path" class="gl-font-monospace">{{ commit.truncated_sha }}</gl-link>
|
||||
<template v-if="commit.title">
|
||||
·
|
||||
<span>{{ commit.title }}</span>
|
||||
</template>
|
||||
</div>
|
||||
<div v-if="hasMultipleCommits" class="gl-mt-2">
|
||||
<gl-sprintf :message="$options.i18n.multipleCommits">
|
||||
<template #count>{{ commit.count - 1 }}</template>
|
||||
<template #link="{ content }">
|
||||
<gl-link :href="commit.compare_path"
|
||||
>{{ content }}
|
||||
<span class="gl-font-monospace"
|
||||
>{{ commit.from_truncated_sha }}…{{ commit.to_truncated_sha }}</span
|
||||
></gl-link
|
||||
>
|
||||
</template>
|
||||
</gl-sprintf>
|
||||
</div>
|
||||
</template>
|
||||
</contribution-event-base>
|
||||
</template>
|
||||
|
|
@ -5,11 +5,13 @@ import {
|
|||
EVENT_TYPE_EXPIRED,
|
||||
EVENT_TYPE_JOINED,
|
||||
EVENT_TYPE_LEFT,
|
||||
EVENT_TYPE_PUSHED,
|
||||
} from '../constants';
|
||||
import ContributionEventApproved from './contribution_event/contribution_event_approved.vue';
|
||||
import ContributionEventExpired from './contribution_event/contribution_event_expired.vue';
|
||||
import ContributionEventJoined from './contribution_event/contribution_event_joined.vue';
|
||||
import ContributionEventLeft from './contribution_event/contribution_event_left.vue';
|
||||
import ContributionEventPushed from './contribution_event/contribution_event_pushed.vue';
|
||||
|
||||
export default {
|
||||
props: {
|
||||
|
|
@ -116,6 +118,9 @@ export default {
|
|||
case EVENT_TYPE_LEFT:
|
||||
return ContributionEventLeft;
|
||||
|
||||
case EVENT_TYPE_PUSHED:
|
||||
return ContributionEventPushed;
|
||||
|
||||
default:
|
||||
return EmptyComponent;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,3 +12,7 @@ export const EVENT_TYPE_DESTROYED = 'destroyed';
|
|||
export const EVENT_TYPE_EXPIRED = 'expired';
|
||||
export const EVENT_TYPE_APPROVED = 'approved';
|
||||
export const EVENT_TYPE_PRIVATE = 'private';
|
||||
|
||||
// From app/models/push_event_payload.rb#L22
|
||||
export const PUSH_EVENT_REF_TYPE_BRANCH = 'branch';
|
||||
export const PUSH_EVENT_REF_TYPE_TAG = 'tag';
|
||||
|
|
|
|||
|
|
@ -233,6 +233,7 @@ export default {
|
|||
title: TOKEN_TITLE_ASSIGNEE,
|
||||
icon: 'user',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
operators: OPERATORS_IS_NOT_OR,
|
||||
fetchUsers: this.fetchUsers,
|
||||
preloadedUsers,
|
||||
|
|
@ -243,6 +244,7 @@ export default {
|
|||
title: TOKEN_TITLE_AUTHOR,
|
||||
icon: 'pencil',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
operators: OPERATORS_IS_NOT_OR,
|
||||
fetchUsers: this.fetchUsers,
|
||||
defaultUsers: [],
|
||||
|
|
|
|||
|
|
@ -365,6 +365,7 @@ export default {
|
|||
title: TOKEN_TITLE_AUTHOR,
|
||||
icon: 'pencil',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
defaultUsers: [],
|
||||
operators: this.hasOrFeature ? OPERATORS_IS_NOT_OR : OPERATORS_IS_NOT,
|
||||
fetchUsers: this.fetchUsers,
|
||||
|
|
@ -376,6 +377,7 @@ export default {
|
|||
title: TOKEN_TITLE_ASSIGNEE,
|
||||
icon: 'user',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
operators: this.hasOrFeature ? OPERATORS_IS_NOT_OR : OPERATORS_IS_NOT,
|
||||
fetchUsers: this.fetchUsers,
|
||||
recentSuggestionsStorageKey: `${this.fullPath}-issues-recent-tokens-assignee`,
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import { GlLoadingIcon, GlIcon, GlAlert } from '@gitlab/ui';
|
|||
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
|
||||
import { throttle, isEmpty } from 'lodash';
|
||||
import { mapGetters, mapState, mapActions } from 'vuex';
|
||||
import LogTopBar from 'ee_else_ce/jobs/components/job/job_log_controllers.vue';
|
||||
import SafeHtml from '~/vue_shared/directives/safe_html';
|
||||
import { isScrolledToBottom } from '~/lib/utils/scroll_utils';
|
||||
import { __, sprintf } from '~/locale';
|
||||
|
|
@ -13,7 +14,6 @@ import { MANUAL_STATUS } from '~/jobs/constants';
|
|||
import EmptyState from './empty_state.vue';
|
||||
import EnvironmentsBlock from './environments_block.vue';
|
||||
import ErasedBlock from './erased_block.vue';
|
||||
import LogTopBar from './job_log_controllers.vue';
|
||||
import StuckBlock from './stuck_block.vue';
|
||||
import UnmetPrerequisitesBlock from './unmet_prerequisites_block.vue';
|
||||
import Sidebar from './sidebar/sidebar.vue';
|
||||
|
|
|
|||
|
|
@ -178,6 +178,7 @@ export default {
|
|||
</script>
|
||||
<template>
|
||||
<div class="top-bar gl-display-flex gl-justify-content-space-between">
|
||||
<slot name="drawers"></slot>
|
||||
<!-- truncate information -->
|
||||
<div
|
||||
class="truncated-info gl-display-none gl-sm-display-flex gl-flex-wrap gl-align-items-center"
|
||||
|
|
@ -197,6 +198,7 @@ export default {
|
|||
<!-- eo truncate information -->
|
||||
|
||||
<div class="controllers">
|
||||
<slot name="controllers"> </slot>
|
||||
<gl-search-box-by-click
|
||||
v-model="searchTerm"
|
||||
class="gl-mr-3"
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import { GlToast } from '@gitlab/ui';
|
|||
import Vue from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import createDefaultClient from '~/lib/graphql';
|
||||
import { parseBoolean } from '~/lib/utils/common_utils';
|
||||
import JobApp from './components/job/job_app.vue';
|
||||
import createStore from './store';
|
||||
|
||||
|
|
@ -29,6 +30,7 @@ const initializeJobPage = (element) => {
|
|||
buildStatus,
|
||||
projectPath,
|
||||
retryOutdatedJobDocsUrl,
|
||||
aiRootCauseAnalysisAvailable,
|
||||
} = element.dataset;
|
||||
|
||||
return new Vue({
|
||||
|
|
@ -41,6 +43,7 @@ const initializeJobPage = (element) => {
|
|||
provide: {
|
||||
projectPath,
|
||||
retryOutdatedJobDocsUrl,
|
||||
aiRootCauseAnalysisAvailable: parseBoolean(aiRootCauseAnalysisAvailable),
|
||||
},
|
||||
render(createElement) {
|
||||
return createElement('job-app', {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,34 @@
|
|||
<script>
|
||||
import { GlLink } from '@gitlab/ui';
|
||||
import * as translations from '~/ml/model_registry/routes/models/index/translations';
|
||||
|
||||
export default {
|
||||
name: 'MlExperimentsIndexApp',
|
||||
components: {
|
||||
GlLink,
|
||||
},
|
||||
props: {
|
||||
models: {
|
||||
type: Array,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
i18n: translations,
|
||||
};
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div>
|
||||
<div class="detail-page-header gl-flex-wrap">
|
||||
<div class="detail-page-header-body">
|
||||
<div class="page-title gl-flex-grow-1 gl-display-flex gl-align-items-center">
|
||||
<h2 class="gl-font-size-h-display gl-my-0">{{ $options.i18n.TITLE_LABEL }}</h2>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div v-for="model in models" :key="model.name">
|
||||
<gl-link :href="model.path"> {{ model.name }} / {{ model.version }} </gl-link>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
import MlModelsIndex from './components/ml_models_index.vue';
|
||||
|
||||
export default MlModelsIndex;
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
import { s__ } from '~/locale';
|
||||
|
||||
export const TITLE_LABEL = s__('MlExperimentTracking|Model registry');
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
import { initSimpleApp } from '~/helpers/init_simple_app_helper';
|
||||
import MlModelsIndex from '~/ml/model_registry/routes/models/index';
|
||||
|
||||
initSimpleApp('#js-index-ml-models', MlModelsIndex);
|
||||
|
|
@ -116,6 +116,7 @@ export default {
|
|||
unique: true,
|
||||
symbol: '@',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
operators: OPERATORS_IS,
|
||||
fetchPath: this.projectPath,
|
||||
fetchUsers: Api.projectUsers.bind(Api),
|
||||
|
|
@ -127,6 +128,7 @@ export default {
|
|||
unique: true,
|
||||
symbol: '@',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
operators: OPERATORS_IS,
|
||||
fetchPath: this.projectPath,
|
||||
fetchUsers: Api.projectUsers.bind(Api),
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ class Projects::JobsController < Projects::ApplicationController
|
|||
before_action :verify_proxy_request!, only: :proxy_websocket_authorize
|
||||
before_action :push_job_log_jump_to_failures, only: [:show]
|
||||
before_action :reject_if_build_artifacts_size_refreshing!, only: [:erase]
|
||||
|
||||
before_action :push_ai_build_failure_cause, only: [:show]
|
||||
layout 'project'
|
||||
|
||||
feature_category :continuous_integration
|
||||
|
|
@ -258,4 +258,8 @@ class Projects::JobsController < Projects::ApplicationController
|
|||
def push_job_log_jump_to_failures
|
||||
push_frontend_feature_flag(:job_log_jump_to_failures, @project)
|
||||
end
|
||||
|
||||
def push_ai_build_failure_cause
|
||||
push_frontend_feature_flag(:ai_build_failure_cause, @project)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,9 +8,6 @@ module Projects
|
|||
|
||||
def index
|
||||
@models = ::Projects::Ml::ModelFinder.new(@project).execute
|
||||
|
||||
# TODO: Frontend rendering being added with https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124833
|
||||
render html: ::Ml::ModelsIndexPresenter.new(@models).present
|
||||
end
|
||||
|
||||
private
|
||||
|
|
|
|||
|
|
@ -0,0 +1,49 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Resolvers
|
||||
module Ci
|
||||
class RunnerJobCountResolver < BaseResolver
|
||||
include Gitlab::Graphql::Authorize::AuthorizeResource
|
||||
|
||||
type GraphQL::Types::Int, null: true
|
||||
|
||||
authorize :read_runner
|
||||
authorizes_object!
|
||||
|
||||
argument :statuses, [::Types::Ci::JobStatusEnum],
|
||||
required: false,
|
||||
description: 'Filter jobs by status.',
|
||||
alpha: { milestone: '16.2' }
|
||||
|
||||
alias_method :runner, :object
|
||||
|
||||
def resolve(statuses: nil)
|
||||
BatchLoader::GraphQL.for(runner.id).batch(key: [:job_count, statuses]) do |runner_ids, loader, _args|
|
||||
counts_by_runner = calculate_job_count_per_runner(runner_ids, statuses)
|
||||
|
||||
runner_ids.each do |runner_id|
|
||||
loader.call(runner_id, counts_by_runner[runner_id]&.count || 0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def calculate_job_count_per_runner(runner_ids, statuses)
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
builds_tbl = ::Ci::Build.arel_table
|
||||
runners_tbl = ::Ci::Runner.arel_table
|
||||
lateral_query = ::Ci::Build.select(1).where(builds_tbl['runner_id'].eq(runners_tbl['id']))
|
||||
lateral_query = lateral_query.where(status: statuses) if statuses
|
||||
# We limit to 1 above the JOB_COUNT_LIMIT to indicate that more items exist after JOB_COUNT_LIMIT
|
||||
lateral_query = lateral_query.limit(::Types::Ci::RunnerType::JOB_COUNT_LIMIT + 1)
|
||||
::Ci::Runner.joins("JOIN LATERAL (#{lateral_query.to_sql}) builds_with_limit ON true")
|
||||
.id_in(runner_ids)
|
||||
.select(:id, Arel.star.count.as('count'))
|
||||
.group(:id)
|
||||
.index_by(&:id)
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -59,7 +59,8 @@ module Types
|
|||
deprecated: { reason: "Use field in `manager` object instead", milestone: '16.2' },
|
||||
description: 'IP address of the runner.'
|
||||
field :job_count, GraphQL::Types::Int, null: true,
|
||||
description: "Number of jobs processed by the runner (limited to #{JOB_COUNT_LIMIT}, plus one to indicate that more items exist)."
|
||||
description: "Number of jobs processed by the runner (limited to #{JOB_COUNT_LIMIT}, plus one to indicate that more items exist).",
|
||||
resolver: ::Resolvers::Ci::RunnerJobCountResolver
|
||||
field :job_execution_status,
|
||||
Types::Ci::RunnerJobExecutionStatusEnum,
|
||||
null: true,
|
||||
|
|
@ -126,28 +127,6 @@ module Types
|
|||
::MarkupHelper.markdown(object.maintenance_note, context.to_h.dup)
|
||||
end
|
||||
|
||||
def job_count
|
||||
BatchLoader::GraphQL.for(runner.id).batch(key: :job_count) do |runner_ids, loader, _args|
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
# We limit to 1 above the JOB_COUNT_LIMIT to indicate that more items exist after JOB_COUNT_LIMIT
|
||||
builds_tbl = ::Ci::Build.arel_table
|
||||
runners_tbl = ::Ci::Runner.arel_table
|
||||
lateral_query = ::Ci::Build.select(1)
|
||||
.where(builds_tbl['runner_id'].eq(runners_tbl['id']))
|
||||
.limit(JOB_COUNT_LIMIT + 1)
|
||||
counts = ::Ci::Runner.joins("JOIN LATERAL (#{lateral_query.to_sql}) builds_with_limit ON true")
|
||||
.id_in(runner_ids)
|
||||
.select(:id, Arel.star.count.as('count'))
|
||||
.group(:id)
|
||||
.index_by(&:id)
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
|
||||
runner_ids.each do |runner_id|
|
||||
loader.call(runner_id, counts[runner_id]&.count || 0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def admin_url
|
||||
Gitlab::Routing.url_helpers.admin_runner_url(runner) if can_admin_runners?
|
||||
end
|
||||
|
|
|
|||
|
|
@ -2,16 +2,16 @@
|
|||
|
||||
module Ci
|
||||
module JobsHelper
|
||||
def jobs_data
|
||||
def jobs_data(project, build)
|
||||
{
|
||||
"endpoint" => project_job_path(@project, @build, format: :json),
|
||||
"project_path" => @project.full_path,
|
||||
"endpoint" => project_job_path(project, build, format: :json),
|
||||
"project_path" => project.full_path,
|
||||
"artifact_help_url" => help_page_path('user/gitlab_com/index.md', anchor: 'gitlab-cicd'),
|
||||
"deployment_help_url" => help_page_path('user/project/clusters/deploy_to_cluster.md', anchor: 'troubleshooting'),
|
||||
"runner_settings_url" => project_runners_path(@build.project, anchor: 'js-runners-settings'),
|
||||
"page_path" => project_job_path(@project, @build),
|
||||
"build_status" => @build.status,
|
||||
"build_stage" => @build.stage_name,
|
||||
"runner_settings_url" => project_runners_path(build.project, anchor: 'js-runners-settings'),
|
||||
"page_path" => project_job_path(project, build),
|
||||
"build_status" => build.status,
|
||||
"build_stage" => build.stage_name,
|
||||
"log_state" => '',
|
||||
"build_options" => javascript_build_options,
|
||||
"retry_outdated_job_docs_url" => help_page_path('ci/pipelines/settings', anchor: 'retry-outdated-jobs')
|
||||
|
|
|
|||
|
|
@ -19,6 +19,8 @@ module Ci
|
|||
|
||||
delegate :avatar_path, :description, :name, :star_count, :forks_count, to: :project
|
||||
|
||||
enum state: { draft: 0, published: 1 }
|
||||
|
||||
def versions
|
||||
project.releases.order_released_desc
|
||||
end
|
||||
|
|
|
|||
|
|
@ -16,7 +16,8 @@ module CommitSignature
|
|||
unverified_key: 4,
|
||||
unknown_key: 5,
|
||||
multiple_signatures: 6,
|
||||
revoked_key: 7
|
||||
revoked_key: 7,
|
||||
verified_system: 8
|
||||
}
|
||||
|
||||
belongs_to :project, class_name: 'Project', foreign_key: 'project_id', optional: false
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ module HasUserType
|
|||
migration_bot: 7,
|
||||
security_bot: 8,
|
||||
automation_bot: 9,
|
||||
security_policy_bot: 10, # Currently not in use. See https://gitlab.com/gitlab-org/gitlab/-/issues/384174
|
||||
security_policy_bot: 10,
|
||||
admin_bot: 11,
|
||||
suggested_reviewers_bot: 12,
|
||||
service_account: 13,
|
||||
|
|
|
|||
|
|
@ -59,6 +59,7 @@ module VulnerabilityFindingHelpers
|
|||
evidence = Vulnerabilities::Finding::Evidence.new(data: report_finding.evidence.data) if report_finding.evidence
|
||||
|
||||
Vulnerabilities::Finding.new(finding_data).tap do |finding|
|
||||
finding.uuid = security_finding.uuid
|
||||
finding.location_fingerprint = report_finding.location.fingerprint
|
||||
finding.vulnerability = vulnerability_for(security_finding.uuid)
|
||||
finding.project = project
|
||||
|
|
|
|||
|
|
@ -0,0 +1,5 @@
|
|||
- title = _('Verified commit')
|
||||
- description = _('This commit was created in the GitLab UI, and signed with a GitLab-verified signature.')
|
||||
- locals = { signature: signature, title: title, description: description, label: _('Verified'), variant: 'success' }
|
||||
|
||||
= render partial: 'projects/commit/signature_badge', locals: locals
|
||||
|
|
@ -7,4 +7,4 @@
|
|||
|
||||
= render_if_exists "shared/shared_runners_minutes_limit_flash_message"
|
||||
|
||||
#js-job-page{ data: jobs_data }
|
||||
#js-job-page{ data: jobs_data(@project, @build) }
|
||||
|
|
|
|||
|
|
@ -0,0 +1,5 @@
|
|||
- breadcrumb_title s_('ModelRegistry|Model registry')
|
||||
- page_title s_('ModelRegistry|Model registry')
|
||||
- presenter = ::Ml::ModelsIndexPresenter.new(@models)
|
||||
|
||||
#js-index-ml-models{ data: { view_model: presenter.present } }
|
||||
|
|
@ -30,7 +30,6 @@ module Gitlab
|
|||
|
||||
# Rails 7.0
|
||||
config.action_controller.raise_on_open_redirects = false
|
||||
config.action_controller.wrap_parameters_by_default = false
|
||||
config.action_dispatch.default_headers = { "X-Frame-Options" => "SAMEORIGIN",
|
||||
"X-XSS-Protection" => "1; mode=block",
|
||||
"X-Content-Type-Options" => "nosniff",
|
||||
|
|
|
|||
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
name: group_analytics_dashboards
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/125337
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/416970
|
||||
milestone: '16.2'
|
||||
type: development
|
||||
group: group::optimize
|
||||
default_enabled: false
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# Be sure to restart your server when you modify this file.
|
||||
#
|
||||
# This file contains settings for ActionController::ParamsWrapper which
|
||||
# is enabled by default.
|
||||
|
||||
# Enable parameter wrapping for JSON. You can disable this by setting :format to an empty array.
|
||||
ActiveSupport.on_load(:action_controller) do
|
||||
wrap_parameters format: [:json]
|
||||
end
|
||||
|
||||
# Disable root element in JSON by default.
|
||||
ActiveSupport.on_load(:active_record) do
|
||||
self.include_root_in_json = false
|
||||
end
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddStateToCatalogResources < Gitlab::Database::Migration[2.1]
|
||||
DRAFT = 0
|
||||
|
||||
def change
|
||||
add_column :catalog_resources, :state, :smallint, null: false, limit: 1, default: DRAFT
|
||||
end
|
||||
end
|
||||
|
|
@ -1,24 +1,11 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class CreateVulnerabilityUuidTypeMigrationIndex < Gitlab::Database::Migration[2.1]
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'tmp_idx_vulns_on_converted_uuid'
|
||||
WHERE_CLAUSE = "uuid_convert_string_to_uuid = '00000000-0000-0000-0000-000000000000'::uuid"
|
||||
|
||||
def up
|
||||
add_concurrent_index(
|
||||
:vulnerability_occurrences,
|
||||
%i[id uuid],
|
||||
name: INDEX_NAME,
|
||||
where: WHERE_CLAUSE
|
||||
)
|
||||
# no-op due to https://gitlab.com/gitlab-com/gl-infra/production/-/issues/15983
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index_by_name(
|
||||
:vulnerability_occurrences,
|
||||
INDEX_NAME
|
||||
)
|
||||
# no-op due to https://gitlab.com/gitlab-com/gl-infra/production/-/issues/15983
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,9 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class RemoveUserDetailsProvisionedByGroupAtColumn < Gitlab::Database::Migration[2.1]
|
||||
enable_lock_retries!
|
||||
|
||||
def change
|
||||
remove_column :user_details, :provisioned_by_group_at, :datetime_with_timezone
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
838865785ae99586e6c4017b5c51dee845208501a66fd4a3890641be0c076dfc
|
||||
|
|
@ -0,0 +1 @@
|
|||
3035da4873093dd7c8cee94398c3b4a70a696e1e765d102cfe657b53e3b65be9
|
||||
|
|
@ -12931,7 +12931,8 @@ ALTER SEQUENCE bulk_imports_id_seq OWNED BY bulk_imports.id;
|
|||
CREATE TABLE catalog_resources (
|
||||
id bigint NOT NULL,
|
||||
project_id bigint NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
state smallint DEFAULT 0 NOT NULL
|
||||
);
|
||||
|
||||
CREATE SEQUENCE catalog_resources_id_seq
|
||||
|
|
@ -23632,7 +23633,6 @@ CREATE TABLE user_details (
|
|||
password_last_changed_at timestamp with time zone DEFAULT now() NOT NULL,
|
||||
onboarding_step_url text,
|
||||
discord text DEFAULT ''::text NOT NULL,
|
||||
provisioned_by_group_at timestamp with time zone,
|
||||
enterprise_group_id bigint,
|
||||
enterprise_group_associated_at timestamp with time zone,
|
||||
CONSTRAINT check_245664af82 CHECK ((char_length(webauthn_xid) <= 100)),
|
||||
|
|
@ -33658,8 +33658,6 @@ CREATE INDEX tmp_idx_vuln_reads_where_dismissal_reason_null ON vulnerability_rea
|
|||
|
||||
CREATE INDEX tmp_idx_vulnerability_occurrences_on_id_where_report_type_7_99 ON vulnerability_occurrences USING btree (id) WHERE (report_type = ANY (ARRAY[7, 99]));
|
||||
|
||||
CREATE INDEX tmp_idx_vulns_on_converted_uuid ON vulnerability_occurrences USING btree (id, uuid) WHERE (uuid_convert_string_to_uuid = '00000000-0000-0000-0000-000000000000'::uuid);
|
||||
|
||||
CREATE INDEX tmp_index_ci_job_artifacts_on_expire_at_where_locked_unknown ON ci_job_artifacts USING btree (expire_at, job_id) WHERE ((locked = 2) AND (expire_at IS NOT NULL));
|
||||
|
||||
CREATE INDEX tmp_index_cis_vulnerability_reads_on_id ON vulnerability_reads USING btree (id) WHERE (report_type = 7);
|
||||
|
|
|
|||
|
|
@ -1679,7 +1679,7 @@ Repository check failures on a Geo secondary site do not necessarily imply a rep
|
|||
|
||||
1. Find affected repositories as mentioned below, as well as their [logged errors](../../repository_checks.md#what-to-do-if-a-check-failed).
|
||||
1. Try to diagnose specific `git fsck` errors. The range of possible errors is wide, try putting them into search engines.
|
||||
1. Test normal functions of the affected repositories. Pull from the secondary, view the files.
|
||||
1. Test typical functions of the affected repositories. Pull from the secondary, view the files.
|
||||
1. Check if the primary site's copy of the repository has an identical `git fsck` error. If you are planning a failover, then consider prioritizing that the secondary site has the same information that the primary site has. Ensure you have a backup of the primary, and follow [planned failover guidelines](../disaster_recovery/planned_failover.md).
|
||||
1. Push to the primary and check if the change gets replicated to the secondary site.
|
||||
1. If replication is not automatically working, try to manually sync the repository.
|
||||
|
|
|
|||
|
|
@ -2265,7 +2265,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
|
|||
### Cluster topology
|
||||
|
||||
The following tables and diagram detail the hybrid environment using the same formats
|
||||
as the normal environment above.
|
||||
as the typical environment above.
|
||||
|
||||
First are the components that run in Kubernetes. These run across several node groups, although you can change
|
||||
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
|
||||
|
|
|
|||
|
|
@ -2283,7 +2283,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
|
|||
### Cluster topology
|
||||
|
||||
The following tables and diagram detail the hybrid environment using the same formats
|
||||
as the normal environment above.
|
||||
as the typical environment above.
|
||||
|
||||
First are the components that run in Kubernetes. These run across several node groups, although you can change
|
||||
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
|
||||
|
|
|
|||
|
|
@ -963,7 +963,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
|
|||
### Cluster topology
|
||||
|
||||
The following tables and diagram detail the hybrid environment using the same formats
|
||||
as the normal environment above.
|
||||
as the typical environment above.
|
||||
|
||||
First are the components that run in Kubernetes. These run across several node groups, although you can change
|
||||
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
|
||||
|
|
|
|||
|
|
@ -2272,7 +2272,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
|
|||
### Cluster topology
|
||||
|
||||
The following tables and diagram detail the hybrid environment using the same formats
|
||||
as the normal environment above.
|
||||
as the typical environment above.
|
||||
|
||||
First are the components that run in Kubernetes. These run across several node groups, although you can change
|
||||
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
|
||||
|
|
|
|||
|
|
@ -2282,7 +2282,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
|
|||
### Cluster topology
|
||||
|
||||
The following tables and diagram detail the hybrid environment using the same formats
|
||||
as the normal environment above.
|
||||
as the typical environment above.
|
||||
|
||||
First are the components that run in Kubernetes. These run across several node groups, although you can change
|
||||
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
|
||||
|
|
|
|||
|
|
@ -2240,7 +2240,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
|
|||
### Cluster topology
|
||||
|
||||
The following tables and diagram detail the hybrid environment using the same formats
|
||||
as the normal environment above.
|
||||
as the typical environment above.
|
||||
|
||||
First are the components that run in Kubernetes. These run across several node groups, although you can change
|
||||
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
|
||||
|
|
|
|||
|
|
@ -0,0 +1,33 @@
|
|||
---
|
||||
stage: Data Stores
|
||||
group: Database
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
|
||||
---
|
||||
|
||||
# Database migrations API **(FREE SELF)**
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123408) in GitLab 16.2.
|
||||
|
||||
This API is for managing database migrations used in the development of GitLab.
|
||||
|
||||
All methods require administrator authorization.
|
||||
|
||||
## Mark a migration as successful
|
||||
|
||||
Mark pending migrations as successfully executed to prevent them from being
|
||||
executed by the `db:migrate` tasks. Use this API to skip failing
|
||||
migrations after they are determined to be safe to skip.
|
||||
|
||||
```plaintext
|
||||
POST /api/v4/admin/migrations/:version/mark
|
||||
```
|
||||
|
||||
| Attribute | Type | Required | Description |
|
||||
|-----------------|----------------|----------|----------------------------------------------------------------------------------|
|
||||
| `version` | integer | yes | Version timestamp of the migration to be skipped |
|
||||
| `database` | string | no | The database name for which the migration is skipped. Defaults to `main`. |
|
||||
|
||||
```shell
|
||||
curl --header "PRIVATE-TOKEN: <your_access_token>" \
|
||||
--url "https://gitlab.example.com/api/v4/admin/migrations/:version/mark"
|
||||
```
|
||||
|
|
@ -13160,7 +13160,6 @@ CI/CD variables for a project.
|
|||
| <a id="cirunnergroups"></a>`groups` | [`GroupConnection`](#groupconnection) | Groups the runner is associated with. For group runners only. (see [Connections](#connections)) |
|
||||
| <a id="cirunnerid"></a>`id` | [`CiRunnerID!`](#cirunnerid) | ID of the runner. |
|
||||
| <a id="cirunneripaddress"></a>`ipAddress` **{warning-solid}** | [`String`](#string) | **Deprecated** in 16.2. Use field in `manager` object instead. |
|
||||
| <a id="cirunnerjobcount"></a>`jobCount` | [`Int`](#int) | Number of jobs processed by the runner (limited to 1000, plus one to indicate that more items exist). |
|
||||
| <a id="cirunnerjobexecutionstatus"></a>`jobExecutionStatus` **{warning-solid}** | [`CiRunnerJobExecutionStatus`](#cirunnerjobexecutionstatus) | **Introduced** in 15.7. This feature is an Experiment. It can be changed or removed at any time. Job execution status of the runner. |
|
||||
| <a id="cirunnerlocked"></a>`locked` | [`Boolean`](#boolean) | Indicates the runner is locked. |
|
||||
| <a id="cirunnermaintenancenote"></a>`maintenanceNote` | [`String`](#string) | Runner's maintenance notes. |
|
||||
|
|
@ -13186,6 +13185,18 @@ CI/CD variables for a project.
|
|||
|
||||
#### Fields with arguments
|
||||
|
||||
##### `CiRunner.jobCount`
|
||||
|
||||
Number of jobs processed by the runner (limited to 1000, plus one to indicate that more items exist).
|
||||
|
||||
Returns [`Int`](#int).
|
||||
|
||||
###### Arguments
|
||||
|
||||
| Name | Type | Description |
|
||||
| ---- | ---- | ----------- |
|
||||
| <a id="cirunnerjobcountstatuses"></a>`statuses` **{warning-solid}** | [`[CiJobStatus!]`](#cijobstatus) | **Introduced** in 16.2. This feature is an Experiment. It can be changed or removed at any time. Filter jobs by status. |
|
||||
|
||||
##### `CiRunner.jobs`
|
||||
|
||||
Jobs assigned to the runner. This field can only be resolved for one runner in any single request.
|
||||
|
|
@ -26727,6 +26738,7 @@ Verification status of a GPG or X.509 signature for a commit.
|
|||
| <a id="verificationstatusunverified"></a>`UNVERIFIED` | unverified verification status. |
|
||||
| <a id="verificationstatusunverified_key"></a>`UNVERIFIED_KEY` | unverified_key verification status. |
|
||||
| <a id="verificationstatusverified"></a>`VERIFIED` | verified verification status. |
|
||||
| <a id="verificationstatusverified_system"></a>`VERIFIED_SYSTEM` | verified_system verification status. |
|
||||
|
||||
### `VisibilityLevelsEnum`
|
||||
|
||||
|
|
|
|||
|
|
@ -304,6 +304,10 @@ Keyset pagination supports only `order_by=id`. Other sorting options aren't avai
|
|||
Get a list of visible projects owned by the given user. When accessed without
|
||||
authentication, only public projects are returned.
|
||||
|
||||
Prerequisite:
|
||||
|
||||
- To view [certain attributes](https://gitlab.com/gitlab-org/gitlab/-/blob/520776fa8e5a11b8275b7c597d75246fcfc74c89/lib/api/entities/project.rb#L109-130), you must be an administrator or have the Owner role for the project.
|
||||
|
||||
NOTE:
|
||||
Only the projects in the user's (specified in `user_id`) namespace are returned. Projects owned by the user in any group or subgroups are not returned. An empty list is returned if a profile is set to private.
|
||||
|
||||
|
|
|
|||
|
|
@ -135,7 +135,7 @@ Example response:
|
|||
```json
|
||||
{
|
||||
"name": "Ruby",
|
||||
"content": "# This file is a template, and might need editing before it works on your project.\n# To contribute improvements to CI/CD templates, please follow the Development guide at:\n# https://docs.gitlab.com/ee/development/cicd/templates.html\n# This specific template is located at:\n# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Ruby.gitlab-ci.yml\n\n# Official language image. Look for the different tagged releases at:\n# https://hub.docker.com/r/library/ruby/tags/\nimage: ruby:latest\n\n# Pick zero or more services to be used on all builds.\n# Only needed when using a docker container to run your tests in.\n# Check out: https://docs.gitlab.com/ee/ci/services/index.html\nservices:\n - mysql:latest\n - redis:latest\n - postgres:latest\n\nvariables:\n POSTGRES_DB: database_name\n\n# Cache gems in between builds\ncache:\n paths:\n - vendor/ruby\n\n# This is a basic example for a gem or script which doesn't use\n# services such as redis or postgres\nbefore_script:\n - ruby -v # Print out ruby version for debugging\n # Uncomment next line if your rails app needs a JS runtime:\n # - apt-get update -q \u0026\u0026 apt-get install nodejs -yqq\n - bundle config set --local deployment true # Install dependencies into ./vendor/ruby\n - bundle install -j $(nproc)\n\n# Optional - Delete if not using `rubocop`\nrubocop:\n script:\n - rubocop\n\nrspec:\n script:\n - rspec spec\n\nrails:\n variables:\n DATABASE_URL: \"postgresql://postgres:postgres@postgres:5432/$POSTGRES_DB\"\n script:\n - rails db:migrate\n - rails db:seed\n - rails test\n\n# This deploy job uses a simple deploy flow to Heroku, other providers, e.g. AWS Elastic Beanstalk\n# are supported too: https://github.com/travis-ci/dpl\ndeploy:\n stage: deploy\n environment: production\n script:\n - gem install dpl\n - dpl --provider=heroku --app=$HEROKU_APP_NAME --api-key=$HEROKU_PRODUCTION_KEY\n"
|
||||
"content": "# This file is a template, and might need editing before it works on your project.\n# To contribute improvements to CI/CD templates, please follow the Development guide at:\n# https://docs.gitlab.com/ee/development/cicd/templates.html\n# This specific template is located at:\n# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Ruby.gitlab-ci.yml\n\n# Official language image. Look for the different tagged releases at:\n# https://hub.docker.com/r/library/ruby/tags/\nimage: ruby:latest\n\n# Pick zero or more services to be used on all builds.\n# Only needed when using a docker container to run your tests in.\n# Check out: https://docs.gitlab.com/ee/ci/services/index.html\nservices:\n - mysql:latest\n - redis:latest\n - postgres:latest\n\nvariables:\n POSTGRES_DB: database_name\n\n# Cache gems in between builds\ncache:\n paths:\n - vendor/ruby\n\n# This is a basic example for a gem or script which doesn't use\n# services such as redis or postgres\nbefore_script:\n - ruby -v # Print out ruby version for debugging\n # Uncomment next line if your rails app needs a JS runtime:\n # - apt-get update -q \u0026\u0026 apt-get install nodejs -yqq\n - bundle config set --local deployment true # Install dependencies into ./vendor/ruby\n - bundle install -j $(nproc)\n\n# Optional - Delete if not using `rubocop`\nrubocop:\n script:\n - rubocop\n\nrspec:\n script:\n - rspec spec\n\nrails:\n variables:\n DATABASE_URL: \"postgresql://postgres:postgres@postgres:5432/$POSTGRES_DB\"\n script:\n - rails db:migrate\n - rails db:seed\n - rails test\n\n# This deploy job uses a simple deploy flow to Heroku, other providers, for example, AWS Elastic Beanstalk\n# are supported too: https://github.com/travis-ci/dpl\ndeploy:\n stage: deploy\n environment: production\n script:\n - gem install dpl\n - dpl --provider=heroku --app=$HEROKU_APP_NAME --api-key=$HEROKU_PRODUCTION_KEY\n"
|
||||
}
|
||||
```
|
||||
|
||||
|
|
|
|||
|
|
@ -25,9 +25,9 @@ and also Git repository data.
|
|||
|
||||
## 2. Data flow
|
||||
|
||||
Each cell has a number of application databases to back up (e.g. `main`, and `ci`).
|
||||
Each cell has a number of application databases to back up (for example, `main`, and `ci`).
|
||||
|
||||
Additionally, there may be cluster-wide metadata tables (e.g. `users` table)
|
||||
Additionally, there may be cluster-wide metadata tables (for example, `users` table)
|
||||
which is directly accessible via PostgreSQL.
|
||||
|
||||
## 3. Proposal
|
||||
|
|
|
|||
|
|
@ -25,10 +25,10 @@ GitLab has a lot of
|
|||
[secrets](https://docs.gitlab.com/charts/installation/secrets.html) that needs
|
||||
to be configured.
|
||||
|
||||
Some secrets are for inter-component communication, e.g. `GitLab Shell secret`,
|
||||
Some secrets are for inter-component communication, for example, `GitLab Shell secret`,
|
||||
and used only within a cell.
|
||||
|
||||
Some secrets are used for features, e.g. `ci_jwt_signing_key`.
|
||||
Some secrets are used for features, for example, `ci_jwt_signing_key`.
|
||||
|
||||
## 2. Data flow
|
||||
|
||||
|
|
|
|||
|
|
@ -190,7 +190,7 @@ information. For example:
|
|||
by one of the Cells, and the results of that can be cached. We also need to implement
|
||||
a mechanism for negative cache and cache eviction.
|
||||
|
||||
1. **GraphQL and other ambigious endpoints.**
|
||||
1. **GraphQL and other ambiguous endpoints.**
|
||||
|
||||
Most endpoints have a unique sharding key: the organization, which directly
|
||||
or indirectly (via a group or project) can be used to classify endpoints.
|
||||
|
|
|
|||
|
|
@ -429,7 +429,7 @@ sequenceDiagram
|
|||
```
|
||||
|
||||
In this case the user is not on their "default organization" so their TODO
|
||||
counter will not include their normal todos. We may choose to highlight this in
|
||||
counter will not include their typical todos. We may choose to highlight this in
|
||||
the UI somewhere. A future iteration may be able to fetch that for them from
|
||||
their default organization.
|
||||
|
||||
|
|
|
|||
|
|
@ -452,7 +452,7 @@ sequenceDiagram
|
|||
```
|
||||
|
||||
In this case the user is not on their "default organization" so their TODO
|
||||
counter will not include their normal todos. We may choose to highlight this in
|
||||
counter will not include their typical todos. We may choose to highlight this in
|
||||
the UI somewhere. A future iteration may be able to fetch that for them from
|
||||
their default organization.
|
||||
|
||||
|
|
|
|||
|
|
@ -208,7 +208,7 @@ Gitlab::Database::Writer.config do |config|
|
|||
# then backend-specific configurations hereafter
|
||||
#
|
||||
config.url = 'tcp://user:pwd@localhost:9000/database'
|
||||
# e.g. a serializer helps define how data travels over the wire
|
||||
# for example, a serializer helps define how data travels over the wire
|
||||
config.json_serializer = ClickHouse::Serializer::JsonSerializer
|
||||
# ...
|
||||
end
|
||||
|
|
|
|||
|
|
@ -35,6 +35,10 @@ As ClickHouse has already been selected for use at GitLab, our main goal now is
|
|||
1. Launch: Support ClickHouse-backed features for SaaS and self-managed.
|
||||
1. Improve: Successfully scale our usage of ClickHouse.
|
||||
|
||||
### Non-goals
|
||||
|
||||
ClickHouse will not be packaged by default with self-managed GitLab, due to uncertain need, complexity, and lack of operational experience. We will still work to find the best possible way to enable users to use ClickHouse themselves if they desire, but it will not be on by default. [ClickHouse maintenance and cost](self_managed_costs_and_requirements/index.md) investigations revealed an uncertain cost impact to smaller instances, and at this time unknown nuance to managing ClickHouse. This means features that depend only on ClickHouse will not be available out of the box for self-managed users (as of end of 2022, the majority of revenue comes from self-managed), so new features researching the use of ClickHouse should be aware of the potential impacts to user adoption in the near-term, until a solution is viable.
|
||||
|
||||
## Proposals
|
||||
|
||||
The following are links to proposals in the form of blueprints that address technical challenges to using ClickHouse across a wide variety of features.
|
||||
|
|
@ -44,6 +48,20 @@ The following are links to proposals in the form of blueprints that address tech
|
|||
1. [Abstraction layer](../clickhouse_read_abstraction_layer/index.md) for features to leverage both ClickHouse and PostgreSQL.
|
||||
- What are the benefits and tradeoffs? For example, how would this impact our automated migration and query testing?
|
||||
|
||||
### Product roadmap
|
||||
|
||||
#### Near-term
|
||||
|
||||
In the next 3 months (FY24 Q2) ClickHouse will be implemented by default only for SaaS on GitLab.com or manual enablement for self-managed instances. This is due to the uncertain costs and management requirements for self-managed instances. This near-term implementation will be used to develop best practices and strategy to direct self-managed users. This will also constantly shape our recommendations for self-managed instances that want to onboard ClickHouse early.
|
||||
|
||||
#### Mid-term
|
||||
|
||||
After we have formulated best practices of managing ClickHouse ourselves for GitLab.com, the plan for 3-9 months (FY24 2H) will be to offer supported recommendations for self-managed instances that want to run ClickHouse themselves or potentially to a ClickHouse cluster/VM we would manage for users. One proposal for self-managed users is to [create a proxy or abstraction layer](https://gitlab.com/groups/gitlab-org/-/epics/308) that would allow users to connect their self-managed instance to SaaS without additional effort. Another option would be to allow users to "Bring your own ClickHouse" similar to our [approach for Elasticsearch](../../../integration/advanced_search/elasticsearch.md#install-elasticsearch). For the features that require ClickHouse for optimal usage (Value Streams Dashboard, [Product Analytics](https://gitlab.com/groups/gitlab-org/-/epics/8921) and Observability), this will be the initial go-to-market action.
|
||||
|
||||
#### Long-term
|
||||
|
||||
We will work towards a packaged reference version of ClickHouse capable of being easily managed with minimal cost increases for self-managed users. We should be able to reliably instruct users on the management of ClickHouse and provide accurate costs for usage. This will mean any feature could depend on ClickHouse without decreasing end-user exposure.
|
||||
|
||||
## Best Practices
|
||||
|
||||
Best practices and guidelines for developing performant, secure, and scalable features using ClickHouse are located in the [ClickHouse developer documentation](../../../development/database/clickhouse/index.md).
|
||||
|
|
|
|||
|
|
@ -33,7 +33,7 @@ GitLab code search functionality today is backed by Elasticsearch.
|
|||
Elasticsearch has proven useful for other types of search (issues, merge
|
||||
requests, comments and so-on) but is by design not a good choice for code
|
||||
search where users expect matches to be precise (ie. no false positives) and
|
||||
flexible (e.g. support
|
||||
flexible (for example, support
|
||||
[substring matching](https://gitlab.com/gitlab-org/gitlab/-/issues/325234)
|
||||
and
|
||||
[regexes](https://gitlab.com/gitlab-org/gitlab/-/issues/4175)). We have
|
||||
|
|
|
|||
|
|
@ -266,7 +266,7 @@ The expected registry behavior will be covered with integration tests by manipul
|
|||
|
||||
##### Latency
|
||||
|
||||
Excessive latency on established connections is hard to detect and debug, as these might not raise an application error or network timeout in normal circumstances but usually precede them.
|
||||
Excessive latency on established connections is hard to detect and debug, as these might not raise an application error or network timeout in typical circumstances but usually precede them.
|
||||
|
||||
For this reason, the duration of database queries used to serve HTTP API requests should be instrumented using metrics, allowing the detection of unusual variations and trigger alarms accordingly before an excessive latency becomes a timeout or service unavailability.
|
||||
|
||||
|
|
|
|||
|
|
@ -180,14 +180,14 @@ is complete and the registry is ready to make full use of the metadata database.
|
|||
For users with large registries and who are interested in the minimum possible
|
||||
downtime, each step can be ran independently when the tool is passed the appropriate
|
||||
flag. The user will first run the pre-import step while the registry is
|
||||
performing its normal workload. Once that has completed, and the user is ready
|
||||
performing its usual workload. Once that has completed, and the user is ready
|
||||
to stop writes to the registry, the tag import step can be ran. As with the GitLab.com
|
||||
migration, importing tags requires that the registry be offline or in
|
||||
read-only mode. This step does the minimum possible work to achieve fast and
|
||||
efficient tag imports and will always be the fastest of the three steps, reducing
|
||||
the downtime component to a fraction of the total import time. The user can then
|
||||
bring up the registry configured to use the metadata database. After that, the
|
||||
user is free to run the third step during normal registry operations. This step
|
||||
user is free to run the third step during standard registry operations. This step
|
||||
makes any dangling blobs in common storage visible to the database and therefore
|
||||
the online garbage collection process.
|
||||
|
||||
|
|
|
|||
|
|
@ -301,8 +301,8 @@ If no resources are found, this is likely that the users have not embedded these
|
|||
### Dependency graph
|
||||
|
||||
- GitLab frontend uses [Owner References](https://kubernetes.io/docs/concepts/overview/working-with-objects/owners-dependents/) to idenfity the dependencies between resources. These are embedded in resources as `metadata.ownerReferences` field.
|
||||
- For the resoruces that don't have owner references, we can use [Well-Known Labels, Annotations and Taints](https://kubernetes.io/docs/reference/labels-annotations-taints/) as complement. e.g. `EndpointSlice` doesn't have `metadata.ownerReferences`, but has `kubernetes.io/service-name` as a reference to the parent `Service` resource.
|
||||
- For the resoruces that don't have owner references, we can use [Well-Known Labels, Annotations and Taints](https://kubernetes.io/docs/reference/labels-annotations-taints/) as complement. for example, `EndpointSlice` doesn't have `metadata.ownerReferences`, but has `kubernetes.io/service-name` as a reference to the parent `Service` resource.
|
||||
|
||||
### Health status of resources
|
||||
|
||||
- GitLab frontend computes the status summary from the fetched resources. Something similar to ArgoCD's [Resource Health](https://argo-cd.readthedocs.io/en/stable/operator-manual/health/) e.g. `Healthy`, `Progressing`, `Degraded` and `Suspended`. The formula is TBD.
|
||||
- GitLab frontend computes the status summary from the fetched resources. Something similar to ArgoCD's [Resource Health](https://argo-cd.readthedocs.io/en/stable/operator-manual/health/) for example, `Healthy`, `Progressing`, `Degraded` and `Suspended`. The formula is TBD.
|
||||
|
|
|
|||
|
|
@ -52,7 +52,7 @@ The aforementioned goals can further be broken down into the following four sub-
|
|||
NOTE:
|
||||
Although remote_write_sender does not test the correctness of a remote write receiver itself as is our case, it does bring some inspiration to implement/develop one within the scope of this project.
|
||||
|
||||
- We aim to also ensure compatibility for special Prometheus data types, e.g. Prometheus histogram(s), summary(s).
|
||||
- We aim to also ensure compatibility for special Prometheus data types, for example, Prometheus histogram(s), summary(s).
|
||||
|
||||
#### Reading data
|
||||
|
||||
|
|
@ -82,13 +82,13 @@ Worth noting that we intend to model exemplars the same way we’re modeling met
|
|||
|
||||
## Proposal
|
||||
|
||||
We intend to use GitLab Observability Backend as a framework for the Metrics implementation so that its lifecycle is also managed via already existing Kubernetes controllers e.g. scheduler, tenant-operator.
|
||||
We intend to use GitLab Observability Backend as a framework for the Metrics implementation so that its lifecycle is also managed via already existing Kubernetes controllers for example, scheduler, tenant-operator.
|
||||
|
||||

|
||||
|
||||
From a development perspective, what’s been marked as our “Application Server” above needs to be developed as a part of this proposal while the remaining peripheral components either already exist or can be provisioned via existing code in `scheduler`/`tenant-operator`.
|
||||
|
||||
**On the write path**, we expect to receive incoming data via `HTTP`/`gRPC` `Ingress` similar to what we do for our existing services, e.g. errortracking, tracing.
|
||||
**On the write path**, we expect to receive incoming data via `HTTP`/`gRPC` `Ingress` similar to what we do for our existing services, for example, errortracking, tracing.
|
||||
|
||||
NOTE:
|
||||
Additionally, since we intend to ingest data via Prometheus `remote_write` API, the received data will be Protobuf-encoded, Snappy-compressed. All received data therefore needs to be decompressed & decoded to turn it into a set of `prompb.TimeSeries` objects, which the rest of our components interact with.
|
||||
|
|
@ -546,7 +546,7 @@ value: 0
|
|||
|
||||
On the read path, we first query all timeseries identifiers by searching for the labels under consideration. Once we have all the `series_id`(s), we then look up all corresponding samples between the query start timestamp and end timestamp.
|
||||
|
||||
For e.g.
|
||||
For example:
|
||||
|
||||
```plaintext
|
||||
kernel{service_environment=~"prod.*", measurement="boot_time"}
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ These gems as still part of the monorepo.
|
|||
|
||||
From the research in [Proposal: split GitLab monolith into components](https://gitlab.com/gitlab-org/gitlab/-/issues/365293)
|
||||
it seems that following [product categories](https://about.gitlab.com/handbook/product/categories/#hierarchy), as a guideline,
|
||||
would be much better than translating organization structure into folder structure (e.g. `app/modules/verify/pipeline-execution/...`).
|
||||
would be much better than translating organization structure into folder structure (for example, `app/modules/verify/pipeline-execution/...`).
|
||||
|
||||
However, this guideline alone is not sufficient and we need a more specific strategy:
|
||||
|
||||
|
|
@ -78,7 +78,7 @@ Start with listing all the Ruby files in a spreadsheet and categorize them into
|
|||
Some of them are already pretty explicit like Ci::, Packages::, etc. Components should follow our
|
||||
[existing naming guide](../../../development/software_design.md#use-namespaces-to-define-bounded-contexts).
|
||||
|
||||
This could be a short-lived Working Group with representative members of each DevOps stage (e.g. Senior+ engineers).
|
||||
This could be a short-lived Working Group with representative members of each DevOps stage (for example, Senior+ engineers).
|
||||
The WG would help defining high-level components and will be the DRIs for driving the changes in their respective DevOps stage.
|
||||
|
||||
### 3. Publish the list of bounded contexts
|
||||
|
|
@ -107,13 +107,13 @@ With this static list we could:
## Glossary

- `modules` are Ruby modules and can be used to nest code hierarchically.
- `namespaces` are unique hierarchies of Ruby constants. E.g. `Ci::` but also `Ci::JobArtifacts::` or `Ci::Pipeline::Chain::`.
- `namespaces` are unique hierarchies of Ruby constants. For example, `Ci::` but also `Ci::JobArtifacts::` or `Ci::Pipeline::Chain::`.
- `packages` are Packwerk packages to group together related functionalities. These packages can be big or small depending on the design and architecture. Inside a package all constants (classes and modules) have the same namespace. For example:
  - In a package `ci`, all the classes would be nested under the `Ci::` namespace. There can also be nested namespaces like `Ci::PipelineProcessing::`.
  - In a package `ci-pipeline_creation` all classes are nested under `Ci::PipelineCreation`, like `Ci::PipelineCreation::Chain::Command`.
  - In a package `ci` a class named `MergeRequests::UpdateHeadPipelineService` would not be allowed because it would not match the package's namespace.
  - This can be enforced easily with [Packwerk-based RuboCop cops](https://github.com/rubyatscale/rubocop-packs/blob/main/lib/rubocop/cop/packs/root_namespace_is_pack_name.rb).
- `bounded context` is a top-level Packwerk package that represents a macro aspect of the domain. For example: `Ci::`, `MergeRequests::`, `Packages::`, etc.
  - A bounded context is represented by a single Ruby module/namespace. E.g. `Ci::` and not `Ci::JobArtifacts::`.
  - A bounded context is represented by a single Ruby module/namespace. For example, `Ci::` and not `Ci::JobArtifacts::`.
  - A bounded context can be made of 1 or multiple Packwerk packages. Nested packages would be recommended if the domain is quite complex and we want to enforce privacy among all the implementation details. For example: `Ci::PipelineProcessing::` and `Ci::PipelineCreation::` could be separate packages of the same bounded context and expose their public API while keeping implementation details private.
  - A new bounded context like `RemoteDevelopment::` can be represented by a single package, while large and complex bounded contexts like `Ci::` would need to be organized into smaller/nested packages.
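To make the package-namespace rule above concrete, here is a small illustrative Ruby sketch; the class names are hypothetical and only mirror the `ci` package example from the glossary:

```ruby
# Inside a package whose root namespace is `Ci::`, every constant nests under
# that namespace, including deeper sub-namespaces.
module Ci
  module PipelineCreation
    module Chain
      class Command
      end
    end
  end
end

# By contrast, defining something like the following inside the `ci` package
# would be flagged by the cop linked above, because the constant does not
# live under the package's root namespace.
module MergeRequests
  class UpdateHeadPipelineService
  end
end
```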
@ -805,10 +805,10 @@ pools as it will always match the contents of the upstream repository.

It has a number of downsides though:

- Normal repositories can now have different states, where some of the
- Repositories can now have different states, where some of the
  repositories are allowed to prune objects and others aren't. This introduces a
  source of uncertainty and makes it easy to accidentally delete objects in a
  normal repository and thus corrupt its forks.
  repository and thus corrupt its forks.

- When upstream repositories go private we must stop updating objects which are
  supposed to be deduplicated across members of the fork network. This means
@ -81,7 +81,7 @@ One data ingestion pipeline will be deployed for each top level GitLab namespace

- Beyond rate limits, resource limits can be enforced per user such that no user can steal more system resources (memory, cpu) than allocated.
- Fine-grained control of horizontal scaling for each user pipeline by adding more OTEL Collector instances.
- Manage the users tenant in accordance to GitLab subscription tier, e.g. quota, throughput, cold storage, shard to different databases
- Manage the users tenant in accordance to GitLab subscription tier, for example, quota, throughput, cold storage, shard to different databases
- Reduced complexity and enhanced security in the pipeline by leveraging off-the-shelf components like the [OpenTelemetry Collector](https://opentelemetry.io/docs/concepts/components/#collector) where data within that collector belongs to no more than a single user/customer.

A pipeline is only deployed for the user upon enabling observability in the project settings, in the same way a user can enable error tracking for their project. When observability is enabled for any project in the user's namespace, a pipeline will be deployed. This deployment is automated by our Kubernetes scheduler-operator and tenant-operator. Provisioning is currently managed through the iframe, but a preferred method would be to provision using a RESTful API. The GitLab UI would have a section in project settings that allows a user to "enable observability", much like they do for error tracking today.
@ -81,7 +81,7 @@ If you want to [contribute to GitLab](https://about.gitlab.com/community/contrib
`gitlab-shared-runners-manager-X.gitlab.com` fleet of runners, dedicated for GitLab projects and related community forks.

These runners are backed by the same machine type as our `small` runners.
Unlike the normal SaaS runners on Linux, each virtual machine is re-used up to 40 times.
Unlike the most commonly used SaaS runners on Linux, each virtual machine is re-used up to 40 times.

As we want to encourage people to contribute, these runners are free of charge.
@ -41,7 +41,7 @@ Some jobs use images that are built from external projects:
[`auto-deploy-app`](https://gitlab.com/gitlab-org/cluster-integration/auto-deploy-image/-/tree/master/assets/auto-deploy-app) is used to deploy.

There are extra variables that get passed to the CI jobs when Auto
DevOps is enabled that are not present in a normal CI job. These can be
DevOps is enabled that are not present in a typical CI job. These can be
found in
[`ProjectAutoDevops`](https://gitlab.com/gitlab-org/gitlab/-/blob/bf69484afa94e091c3e1383945f60dbe4e8681af/app/models/project_auto_devops.rb).
@ -117,7 +117,7 @@ On average, we can say the following:

From the list, it's apparent that the number of `issues` records has
the largest impact on the performance.
As per normal usage, we can say that the number of issue records grows
As per typical usage, we can say that the number of issue records grows
at a faster rate than the `namespaces` and the `projects` records.

This problem affects most of our group-level features where records are listed
@ -159,7 +159,7 @@ configuration is necessary:
- Function-based ordering.
- Ordering with a custom tie-breaker column, like `iid`.

These order objects can be defined in the model classes as normal ActiveRecord scopes, there is no special behavior that prevents using these scopes elsewhere (Kaminari, background jobs).
These order objects can be defined in the model classes as standard ActiveRecord scopes, there is no special behavior that prevents using these scopes elsewhere (Kaminari, background jobs).
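As an illustration of that point, a minimal sketch of an order defined as a plain ActiveRecord scope; the `Issue` model and column names here are assumptions for the example, not the documented implementation:

```ruby
# Hypothetical model: the ordering is an ordinary scope with `iid` as the
# tie-breaker column, so Kaminari pagination or background jobs can reuse it.
class Issue < ApplicationRecord
  scope :order_created_desc, -> { order(created_at: :desc, iid: :desc) }
end

# Reuse elsewhere stays unchanged, for example with offset pagination:
# Issue.order_created_desc.page(params[:page]).per(20)
```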

### `NULLS LAST` ordering
@ -370,7 +370,7 @@ end
```

This endpoint still works when the parent `Project` model is deleted. This can be considered
a data leak which should not happen under normal circumstances:
a data leak which should not happen under typical circumstances:

```ruby
def show
@ -723,7 +723,7 @@ timeout or a worker crash, the next job continues the processing.
### Accumulation of deleted records

There can be cases where the workers need to process an unusually large amount of data. This can
happen under normal usage, for example when a large project or group is deleted. In this scenario,
happen under typical usage, for example when a large project or group is deleted. In this scenario,
there can be several million rows to be deleted or nullified. Due to the limits enforced by the
worker, processing this data takes some time.
@ -214,7 +214,7 @@ Limit (cost=137878.89..137881.65 rows=20 width=1309) (actual time=5523.588..552
(8 rows)
```

We can argue that a normal user does not visit these pages, however, API users could easily navigate to very high page numbers (scraping, collecting data).
We can argue that a typical user does not visit these pages, however, API users could easily navigate to very high page numbers (scraping, collecting data).

### Keyset pagination
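To make the contrast with the offset approach above concrete, a rough sketch of the two query styles in ActiveRecord; the `Issue` model, the ordering columns, and the `last_seen_*` values are placeholders, not the documented implementation:

```ruby
# Offset pagination: the database still has to produce and discard every row
# before the requested page, so high page numbers keep getting slower.
Issue.order(created_at: :desc, id: :desc).offset(20_000).limit(20)

# Keyset pagination: the WHERE clause seeks past the last record of the
# previous page, so the cost stays roughly constant for any page.
Issue.order(created_at: :desc, id: :desc)
     .where('(created_at, id) < (?, ?)', last_seen_created_at, last_seen_id)
     .limit(20)
```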
@ -163,7 +163,7 @@ Sometimes it's necessary to test locally what the frontend production build would
The production build takes a few minutes to complete. Any code changes at this point are
displayed only after executing item 3 above again.

To return to the normal development mode:
To return to the standard development mode:

1. Open `gitlab.yaml` located in your `gitlab` installation folder, scroll down to the `webpack` section and change back `dev_server` to `enabled: true`.
1. Run `yarn clean` to remove the production assets and free some space (optional).
@ -20,7 +20,8 @@ Pipelines are always created for the following scenarios:

Pipeline creation is also affected by the following CI/CD variables:

- If `$FORCE_GITLAB_CI` is set, pipelines are created.
- If `$FORCE_GITLAB_CI` is set, pipelines are created. Not recommended to use.
  See [Avoid `$FORCE_GITLAB_CI`](#avoid-force_gitlab_ci).
- If `$GITLAB_INTERNAL` is not set, pipelines are not created.

No pipeline is created in any other cases (for example, when pushing a branch with no
@ -28,6 +29,24 @@ MR for it).

The source of truth for these workflow rules is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).

### Avoid `$FORCE_GITLAB_CI`

The pipeline is very complex and we need to clearly understand the kind of
pipeline we want to trigger. We need to know which jobs we should run and
which ones we shouldn't.

If we use `$FORCE_GITLAB_CI` to force trigger a pipeline,
we don't really know what kind of pipeline it is. The result can be that we don't
run the jobs we want, or we run too many jobs we don't care about.

Some more context and background can be found at:
[Avoid blanket changes to avoid unexpected run](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102881)

Here's a list of where we're using this right now, and where we should try to move away
from using `$FORCE_GITLAB_CI`:

- [JiHu validation pipeline](https://about.gitlab.com/handbook/ceo/chief-of-staff-team/jihu-support/jihu-validation-pipelines.html)

## Default image

The default image is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
@ -11,7 +11,7 @@ When you are using the GitLab agent for Kubernetes, you might experience issues
You can start by viewing the service logs:

```shell
kubectl logs -f -l=app=gitlab-agent -n gitlab-agent
kubectl logs -f -l=app.kubernetes.io/name=gitlab-agent -n gitlab-agent
```

If you are a GitLab administrator, you can also view the [GitLab agent server logs](../../../administration/clusters/kas.md#troubleshooting).
@ -181,7 +181,7 @@ Storage types that add to the total namespace storage are:

- Git repository
- Git LFS
- Artifacts
- Job artifacts
- Container registry
- Package registry
- Dependency proxy
@ -0,0 +1,11 @@
/.bundle/
/.yardoc
/_yardoc/
/coverage/
/doc/
/pkg/
/spec/reports/
/tmp/

# rspec failure tracking
.rspec_status
@ -0,0 +1,4 @@
include:
  - local: gems/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-schema-validation"
@ -0,0 +1,3 @@
--format documentation
--color
--require spec_helper
@ -0,0 +1,5 @@
inherit_from:
  - ../config/rubocop.yml

AllCops:
  NewCops: enable
@ -0,0 +1,6 @@
# frozen_string_literal: true

source "https://rubygems.org"

# Specify your gem's dependencies in gitlab-schema-validation.gemspec
gemspec
@ -0,0 +1,126 @@
|
|||
PATH
|
||||
remote: .
|
||||
specs:
|
||||
gitlab-schema-validation (0.1.0)
|
||||
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
activesupport (7.0.6)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
i18n (>= 1.6, < 2)
|
||||
minitest (>= 5.1)
|
||||
tzinfo (~> 2.0)
|
||||
ast (2.4.2)
|
||||
benchmark-malloc (0.2.0)
|
||||
benchmark-perf (0.6.0)
|
||||
benchmark-trend (0.4.0)
|
||||
binding_of_caller (1.0.0)
|
||||
debug_inspector (>= 0.0.1)
|
||||
coderay (1.1.3)
|
||||
concurrent-ruby (1.2.2)
|
||||
debug_inspector (1.1.0)
|
||||
diff-lcs (1.5.0)
|
||||
gitlab-styles (10.1.0)
|
||||
rubocop (~> 1.50.2)
|
||||
rubocop-graphql (~> 0.18)
|
||||
rubocop-performance (~> 1.15)
|
||||
rubocop-rails (~> 2.17)
|
||||
rubocop-rspec (~> 2.22)
|
||||
i18n (1.14.1)
|
||||
concurrent-ruby (~> 1.0)
|
||||
json (2.6.3)
|
||||
minitest (5.18.1)
|
||||
parallel (1.23.0)
|
||||
parser (3.2.2.3)
|
||||
ast (~> 2.4.1)
|
||||
racc
|
||||
proc_to_ast (0.1.0)
|
||||
coderay
|
||||
parser
|
||||
unparser
|
||||
racc (1.7.1)
|
||||
rack (3.0.8)
|
||||
rainbow (3.1.1)
|
||||
regexp_parser (2.8.1)
|
||||
rexml (3.2.5)
|
||||
rspec (3.12.0)
|
||||
rspec-core (~> 3.12.0)
|
||||
rspec-expectations (~> 3.12.0)
|
||||
rspec-mocks (~> 3.12.0)
|
||||
rspec-benchmark (0.6.0)
|
||||
benchmark-malloc (~> 0.2)
|
||||
benchmark-perf (~> 0.6)
|
||||
benchmark-trend (~> 0.4)
|
||||
rspec (>= 3.0)
|
||||
rspec-core (3.12.2)
|
||||
rspec-support (~> 3.12.0)
|
||||
rspec-expectations (3.12.3)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.12.0)
|
||||
rspec-mocks (3.12.5)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.12.0)
|
||||
rspec-parameterized (1.0.0)
|
||||
rspec-parameterized-core (< 2)
|
||||
rspec-parameterized-table_syntax (< 2)
|
||||
rspec-parameterized-core (1.0.0)
|
||||
parser
|
||||
proc_to_ast
|
||||
rspec (>= 2.13, < 4)
|
||||
unparser
|
||||
rspec-parameterized-table_syntax (1.0.0)
|
||||
binding_of_caller
|
||||
rspec-parameterized-core (< 2)
|
||||
rspec-support (3.12.1)
|
||||
rubocop (1.50.2)
|
||||
json (~> 2.3)
|
||||
parallel (~> 1.10)
|
||||
parser (>= 3.2.0.0)
|
||||
rainbow (>= 2.2.2, < 4.0)
|
||||
regexp_parser (>= 1.8, < 3.0)
|
||||
rexml (>= 3.2.5, < 4.0)
|
||||
rubocop-ast (>= 1.28.0, < 2.0)
|
||||
ruby-progressbar (~> 1.7)
|
||||
unicode-display_width (>= 2.4.0, < 3.0)
|
||||
rubocop-ast (1.29.0)
|
||||
parser (>= 3.2.1.0)
|
||||
rubocop-capybara (2.18.0)
|
||||
rubocop (~> 1.41)
|
||||
rubocop-factory_bot (2.23.1)
|
||||
rubocop (~> 1.33)
|
||||
rubocop-graphql (0.19.0)
|
||||
rubocop (>= 0.87, < 2)
|
||||
rubocop-performance (1.18.0)
|
||||
rubocop (>= 1.7.0, < 2.0)
|
||||
rubocop-ast (>= 0.4.0)
|
||||
rubocop-rails (2.20.2)
|
||||
activesupport (>= 4.2.0)
|
||||
rack (>= 1.1)
|
||||
rubocop (>= 1.33.0, < 2.0)
|
||||
rubocop-rspec (2.22.0)
|
||||
rubocop (~> 1.33)
|
||||
rubocop-capybara (~> 2.17)
|
||||
rubocop-factory_bot (~> 2.22)
|
||||
ruby-progressbar (1.13.0)
|
||||
tzinfo (2.0.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
unicode-display_width (2.4.2)
|
||||
unparser (0.6.8)
|
||||
diff-lcs (~> 1.3)
|
||||
parser (>= 3.2.0)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
|
||||
DEPENDENCIES
|
||||
gitlab-schema-validation!
|
||||
gitlab-styles (~> 10.1.0)
|
||||
rspec (~> 3.0)
|
||||
rspec-benchmark (~> 0.6.0)
|
||||
rspec-parameterized (~> 1.0)
|
||||
rubocop (~> 1.50)
|
||||
rubocop-rspec (~> 2.22)
|
||||
|
||||
BUNDLED WITH
|
||||
2.4.14
|
||||
|
|
@ -0,0 +1,28 @@
# frozen_string_literal: true

require_relative "lib/gitlab/schema/validation/version"

Gem::Specification.new do |spec|
  spec.name = "gitlab-schema-validation"
  spec.version = Gitlab::Schema::Validation::Version::VERSION
  spec.authors = ["group::database"]
  spec.email = ["engineering@gitlab.com"]

  spec.summary = "Schema validation framework"
  spec.description = "Compares the differences between a structure.sql file and a database
  and reports the inconsistencies."
  spec.homepage = "https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-schema-validation"
  spec.license = "MIT"
  spec.required_ruby_version = ">= 3.0"
  spec.metadata["rubygems_mfa_required"] = "true"

  spec.files = Dir['lib/**/*.rb']
  spec.require_paths = ["lib"]

  spec.add_development_dependency "gitlab-styles", "~> 10.1.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "rspec-benchmark", "~> 0.6.0"
  spec.add_development_dependency "rspec-parameterized", "~> 1.0"
  spec.add_development_dependency "rubocop", "~> 1.50"
  spec.add_development_dependency "rubocop-rspec", "~> 2.22"
end
@ -0,0 +1,10 @@
# frozen_string_literal: true

require_relative "validation/version"

module Gitlab
  module Schema
    module Validation
    end
  end
end
@ -0,0 +1,11 @@
# frozen_string_literal: true

module Gitlab
  module Schema
    module Validation
      module Version
        VERSION = "0.1.0"
      end
    end
  end
end
@ -0,0 +1,7 @@
# frozen_string_literal: true

RSpec.describe Gitlab::Schema::Validation do
  it "has a version number" do
    expect(Gitlab::Schema::Validation::Version::VERSION).not_to be_nil
  end
end
@ -0,0 +1,15 @@
# frozen_string_literal: true

require "gitlab/schema/validation"

RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"

  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!

  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
end
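As a quick smoke check of the new scaffold (assuming the gem is on the load path, for example through the `path:` entry added to the top-level Gemfile), something like the following works; at this stage the library only defines its namespace and version constant:

```ruby
require "gitlab/schema/validation"

# Only the namespace and the version constant exist in this scaffold; the
# structure.sql comparison described in the gemspec is not implemented here.
puts Gitlab::Schema::Validation::Version::VERSION # => "0.1.0"
```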
@ -531,14 +531,24 @@ module Gitlab
request = Gitaly::GetCommitSignaturesRequest.new(repository: @gitaly_repo, commit_ids: commit_ids)
response = gitaly_client_call(@repository.storage, :commit_service, :get_commit_signatures, request, timeout: GitalyClient.fast_timeout)

signatures = Hash.new { |h, k| h[k] = [+''.b, +''.b] }
signatures = Hash.new do |h, k|
  h[k] = {
    signature: +''.b,
    signed_text: +''.b,
    signer: :SIGNER_UNSPECIFIED
  }
end

current_commit_id = nil

response.each do |message|
  current_commit_id = message.commit_id if message.commit_id.present?

  signatures[current_commit_id].first << message.signature
  signatures[current_commit_id].last << message.signed_text
  signatures[current_commit_id][:signature] << message.signature
  signatures[current_commit_id][:signed_text] << message.signed_text

  # The actual value is sent once. All the other chunks send SIGNER_UNSPECIFIED
  signatures[current_commit_id][:signer] = message.signer unless message.signer == :SIGNER_UNSPECIFIED
end

signatures
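The per-commit accumulator above relies on `Hash.new` with a default block; here is a standalone illustration of that pattern (not GitLab code, the data is made up):

```ruby
# Each key lazily gets its own mutable default, and streamed chunks are
# appended to it, so partial messages for the same commit end up merged.
chunks = [
  { id: 'abc', signature: 'SIG-1', signed_text: 'tree ' },
  { id: 'abc', signature: '',      signed_text: '1234' },
  { id: 'def', signature: 'SIG-2', signed_text: 'tree 5678' }
]

accumulated = Hash.new do |hash, key|
  hash[key] = { signature: +'', signed_text: +'' }
end

chunks.each do |chunk|
  accumulated[chunk[:id]][:signature] << chunk[:signature]
  accumulated[chunk[:id]][:signed_text] << chunk[:signed_text]
end

p accumulated
# {"abc"=>{:signature=>"SIG-1", :signed_text=>"tree 1234"},
#  "def"=>{:signature=>"SIG-2", :signed_text=>"tree 5678"}}
```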
@ -87,6 +87,7 @@ module Gitlab
end

def verification_status(gpg_key)
  return :verified_system if verified_by_gitlab?
  return :multiple_signatures if multiple_signatures?
  return :unknown_key unless gpg_key
  return :unverified_key unless gpg_key.verified?
@ -101,6 +102,15 @@ module Gitlab
  end
end

# If a commit is signed by Gitaly, Gitaly returns `SIGNER_SYSTEM` as the signer.
# To determine this, the signature is verified using Gitaly's public key:
# https://gitlab.com/gitlab-org/gitaly/-/blob/v16.2.0-rc2/internal/gitaly/service/commit/commit_signatures.go#L63
#
# It is safe to skip the verification step if the commit has been signed by Gitaly.
def verified_by_gitlab?
  signer == :SIGNER_SYSTEM
end

def user_infos(gpg_key)
  gpg_key&.verified_user_infos&.first || gpg_key&.user_infos&.first || {}
end
@ -34,13 +34,19 @@ module Gitlab

def signature_text
  strong_memoize(:signature_text) do
    @signature_data.itself ? @signature_data[0] : nil
    @signature_data.itself ? @signature_data[:signature] : nil
  end
end

def signed_text
  strong_memoize(:signed_text) do
    @signature_data.itself ? @signature_data[1] : nil
    @signature_data.itself ? @signature_data[:signed_text] : nil
  end
end

def signer
  strong_memoize(:signer) do
    @signature_data.itself ? @signature_data[:signer] : nil
  end
end
@ -10,7 +10,7 @@ module Gitlab
end

def attributes
  signature = ::Gitlab::Ssh::Signature.new(signature_text, signed_text, @commit.committer_email)
  signature = ::Gitlab::Ssh::Signature.new(signature_text, signed_text, signer, @commit.committer_email)

  {
    commit_sha: @commit.sha,
@ -11,15 +11,17 @@ module Gitlab

GIT_NAMESPACE = 'git'

def initialize(signature_text, signed_text, committer_email)
def initialize(signature_text, signed_text, signer, committer_email)
  @signature_text = signature_text
  @signed_text = signed_text
  @signer = signer
  @committer_email = committer_email
end

def verification_status
  strong_memoize(:verification_status) do
    next :unverified unless all_attributes_present?
    next :verified_system if verified_by_gitlab?
    next :unverified unless valid_signature_blob?
    next :unknown_key unless signed_by_key
    next :other_user unless committer
@ -81,6 +83,15 @@ module Gitlab
          nil
        end
      end

      # If a commit is signed by Gitaly, Gitaly returns `SIGNER_SYSTEM` as the signer.
      # To determine this, the signature is verified using Gitaly's public key:
      # https://gitlab.com/gitlab-org/gitaly/-/blob/v16.2.0-rc2/internal/gitaly/service/commit/commit_signatures.go#L63
      #
      # It is safe to skip the verification step if the commit has been signed by Gitaly.
      def verified_by_gitlab?
        @signer == :SIGNER_SYSTEM
      end
    end
  end
end
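For context, a hedged sketch of how the new `signer` argument is meant to flow into the SSH verifier; the values are placeholders, and only the constructor and statuses visible in this diff are assumed:

```ruby
# Illustrative only: wiring a Gitaly-provided signer value into the verifier.
signature_data = {
  signature: "-----BEGIN SSH SIGNATURE-----\n...",
  signed_text: "tree ...",
  signer: :SIGNER_SYSTEM # set by Gitaly when it signed the commit itself
}

signature = ::Gitlab::Ssh::Signature.new(
  signature_data[:signature],
  signature_data[:signed_text],
  signature_data[:signer],
  "user@example.com"
)

# A commit signed by Gitaly short-circuits to :verified_system before any
# cryptographic check of the signature blob is attempted.
signature.verification_status # => :verified_system
```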
@ -12623,15 +12623,36 @@ msgstr ""
|
|||
msgid "ContributionEvent|Approved merge request %{targetLink} in %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Deleted branch %{refLink} in %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Deleted tag %{refLink} in %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Joined project %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Left project %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Pushed a new branch %{refLink} in %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Pushed a new tag %{refLink} in %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Pushed to branch %{refLink} in %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Pushed to tag %{refLink} in %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|Removed due to membership expiration from %{resourceParentLink}."
|
||||
msgstr ""
|
||||
|
||||
msgid "ContributionEvent|…and %{count} more commits. %{linkStart}Compare%{linkEnd}."
|
||||
msgstr ""
|
||||
|
||||
msgid "Contributions for %{calendar_date}"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -19788,6 +19809,9 @@ msgstr ""
|
|||
msgid "Generate project access tokens scoped to this project for your applications that need access to the GitLab API."
|
||||
msgstr ""
|
||||
|
||||
msgid "Generate root cause analysis"
|
||||
msgstr ""
|
||||
|
||||
msgid "Generate site and private keys at"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -26201,6 +26225,9 @@ msgstr ""
|
|||
msgid "Jobs|Raw text search is not currently supported for the jobs filtered search feature. Please use the available search tokens."
|
||||
msgstr ""
|
||||
|
||||
msgid "Jobs|Root cause analysis"
|
||||
msgstr ""
|
||||
|
||||
msgid "Jobs|There was a problem fetching the failed jobs."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -29368,6 +29395,9 @@ msgstr ""
|
|||
msgid "MlExperimentTracking|Model experiments"
|
||||
msgstr ""
|
||||
|
||||
msgid "MlExperimentTracking|Model registry"
|
||||
msgstr ""
|
||||
|
||||
msgid "MlExperimentTracking|Name"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -29404,6 +29434,9 @@ msgstr ""
|
|||
msgid "Model experiments"
|
||||
msgstr ""
|
||||
|
||||
msgid "ModelRegistry|Model registry"
|
||||
msgstr ""
|
||||
|
||||
msgid "Modified"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -39160,6 +39193,12 @@ msgstr ""
|
|||
msgid "Rollout of free user limits within GitLab.com. Do not edit these values unless approval has been given via %{link_start}this issue%{link_end}."
|
||||
msgstr ""
|
||||
|
||||
msgid "Root cause analysis"
|
||||
msgstr ""
|
||||
|
||||
msgid "Root cause analysis is a feature that analyzes your logs to determine why a job may have failed and the potential ways to fix it. To generate this analysis, we may share information in your job logs with %{linkStart}Third-Party AI providers%{linkEnd}. Before initiating this analysis, please do not include in your logs any information that could impact the security or privacy of your account."
|
||||
msgstr ""
|
||||
|
||||
msgid "Ruby"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -46640,6 +46679,9 @@ msgstr ""
|
|||
msgid "This commit is part of merge request %{link_to_merge_request}. Comments created here will be created in the context of that merge request."
|
||||
msgstr ""
|
||||
|
||||
msgid "This commit was created in the GitLab UI, and signed with a GitLab-verified signature."
|
||||
msgstr ""
|
||||
|
||||
msgid "This commit was signed with a %{strong_open}verified%{strong_close} signature and the committer email is verified to belong to the same user."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -49092,6 +49134,9 @@ msgstr ""
|
|||
msgid "UsageQuota|Pipelines"
|
||||
msgstr ""
|
||||
|
||||
msgid "UsageQuota|Project storage included in %{planName} subscription"
|
||||
msgstr ""
|
||||
|
||||
msgid "UsageQuota|Recalculate repository usage"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -51300,6 +51345,9 @@ msgstr ""
|
|||
msgid "What is repository mirroring?"
|
||||
msgstr ""
|
||||
|
||||
msgid "What is root cause analysis?"
|
||||
msgstr ""
|
||||
|
||||
msgid "What is squashing?"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -836,6 +836,7 @@ export const mockTokens = (fetchLabels, fetchUsers, fetchMilestones, isSignedIn)
|
|||
type: TOKEN_TYPE_ASSIGNEE,
|
||||
operators: OPERATORS_IS_NOT,
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
unique: true,
|
||||
fetchUsers,
|
||||
preloadedUsers: [],
|
||||
|
|
@ -847,6 +848,7 @@ export const mockTokens = (fetchLabels, fetchUsers, fetchMilestones, isSignedIn)
|
|||
operators: OPERATORS_IS_NOT,
|
||||
symbol: '@',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
unique: true,
|
||||
fetchUsers,
|
||||
preloadedUsers: [],
|
||||
|
|
|
|||
|
|
@ -0,0 +1,141 @@
|
|||
import { mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import ContributionEventPushed from '~/contribution_events/components/contribution_event/contribution_event_pushed.vue';
|
||||
import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
|
||||
import ResourceParentLink from '~/contribution_events/components/resource_parent_link.vue';
|
||||
import {
|
||||
eventPushedNewBranch,
|
||||
eventPushedNewTag,
|
||||
eventPushedBranch,
|
||||
eventPushedTag,
|
||||
eventPushedRemovedBranch,
|
||||
eventPushedRemovedTag,
|
||||
eventBulkPushedBranch,
|
||||
} from '../../utils';
|
||||
|
||||
describe('ContributionEventPushed', () => {
|
||||
let wrapper;
|
||||
|
||||
const createComponent = ({ propsData }) => {
|
||||
wrapper = mountExtended(ContributionEventPushed, {
|
||||
propsData,
|
||||
});
|
||||
};
|
||||
|
||||
describe.each`
|
||||
event | expectedMessage | expectedIcon
|
||||
${eventPushedNewBranch()} | ${'Pushed a new branch'} | ${'commit'}
|
||||
${eventPushedNewTag()} | ${'Pushed a new tag'} | ${'commit'}
|
||||
${eventPushedBranch()} | ${'Pushed to branch'} | ${'commit'}
|
||||
${eventPushedTag()} | ${'Pushed to tag'} | ${'commit'}
|
||||
${eventPushedRemovedBranch()} | ${'Deleted branch'} | ${'remove'}
|
||||
${eventPushedRemovedTag()} | ${'Deleted tag'} | ${'remove'}
|
||||
`('when event is $event', ({ event, expectedMessage, expectedIcon }) => {
|
||||
beforeEach(() => {
|
||||
createComponent({ propsData: { event } });
|
||||
});
|
||||
|
||||
it('renders `ContributionEventBase` with correct props', () => {
|
||||
expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
|
||||
event,
|
||||
iconName: expectedIcon,
|
||||
});
|
||||
});
|
||||
|
||||
it('renders message', () => {
|
||||
expect(wrapper.findByTestId('event-body').text()).toContain(expectedMessage);
|
||||
});
|
||||
|
||||
it('renders resource parent link', () => {
|
||||
expect(wrapper.findComponent(ResourceParentLink).props('event')).toEqual(event);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when ref has a path', () => {
|
||||
const event = eventPushedNewBranch();
|
||||
const path = '/foo';
|
||||
|
||||
beforeEach(() => {
|
||||
createComponent({
|
||||
propsData: {
|
||||
event: {
|
||||
...event,
|
||||
ref: {
|
||||
...event.ref,
|
||||
path,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('renders ref link', () => {
|
||||
expect(wrapper.findByRole('link', { name: event.ref.name }).attributes('href')).toBe(path);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when ref does not have a path', () => {
|
||||
const event = eventPushedRemovedBranch();
|
||||
|
||||
beforeEach(() => {
|
||||
createComponent({
|
||||
propsData: {
|
||||
event,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('renders ref name without a link', () => {
|
||||
expect(wrapper.findByRole('link', { name: event.ref.name }).exists()).toBe(false);
|
||||
expect(wrapper.findByText(event.ref.name).exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
it('renders renders a link to the commit', () => {
|
||||
const event = eventPushedNewBranch();
|
||||
createComponent({
|
||||
propsData: {
|
||||
event,
|
||||
},
|
||||
});
|
||||
|
||||
expect(
|
||||
wrapper.findByRole('link', { name: event.commit.truncated_sha }).attributes('href'),
|
||||
).toBe(event.commit.path);
|
||||
});
|
||||
|
||||
it('renders commit title', () => {
|
||||
const event = eventPushedNewBranch();
|
||||
createComponent({
|
||||
propsData: {
|
||||
event,
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.findByText(event.commit.title).exists()).toBe(true);
|
||||
});
|
||||
|
||||
describe('when multiple commits are pushed', () => {
|
||||
const event = eventBulkPushedBranch();
|
||||
beforeEach(() => {
|
||||
createComponent({
|
||||
propsData: {
|
||||
event,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('renders message', () => {
|
||||
expect(wrapper.text()).toContain('…and 4 more commits.');
|
||||
});
|
||||
|
||||
it('renders compare link', () => {
|
||||
expect(
|
||||
wrapper
|
||||
.findByRole('link', {
|
||||
name: `Compare ${event.commit.from_truncated_sha}…${event.commit.to_truncated_sha}`,
|
||||
})
|
||||
.attributes('href'),
|
||||
).toBe(event.commit.compare_path);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,11 +1,11 @@
|
|||
import events from 'test_fixtures/controller/users/activity.json';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import ContributionEvents from '~/contribution_events/components/contribution_events.vue';
|
||||
import ContributionEventApproved from '~/contribution_events/components/contribution_event/contribution_event_approved.vue';
|
||||
import ContributionEventExpired from '~/contribution_events/components/contribution_event/contribution_event_expired.vue';
|
||||
import ContributionEventJoined from '~/contribution_events/components/contribution_event/contribution_event_joined.vue';
|
||||
import ContributionEventLeft from '~/contribution_events/components/contribution_event/contribution_event_left.vue';
|
||||
import { eventApproved, eventExpired, eventJoined, eventLeft } from '../utils';
|
||||
import ContributionEventPushed from '~/contribution_events/components/contribution_event/contribution_event_pushed.vue';
|
||||
import { eventApproved, eventExpired, eventJoined, eventLeft, eventPushedBranch } from '../utils';
|
||||
|
||||
describe('ContributionEvents', () => {
|
||||
let wrapper;
|
||||
|
|
@ -13,7 +13,7 @@ describe('ContributionEvents', () => {
|
|||
const createComponent = () => {
|
||||
wrapper = shallowMountExtended(ContributionEvents, {
|
||||
propsData: {
|
||||
events,
|
||||
events: [eventApproved(), eventExpired(), eventJoined(), eventLeft(), eventPushedBranch()],
|
||||
},
|
||||
});
|
||||
};
|
||||
|
|
@ -24,6 +24,7 @@ describe('ContributionEvents', () => {
|
|||
${ContributionEventExpired} | ${eventExpired()}
|
||||
${ContributionEventJoined} | ${eventJoined()}
|
||||
${ContributionEventLeft} | ${eventLeft()}
|
||||
${ContributionEventPushed} | ${eventPushedBranch()}
|
||||
`(
|
||||
'renders `$expectedComponent.name` component and passes expected event',
|
||||
({ expectedComponent, expectedEvent }) => {
|
||||
|
|
|
|||
|
|
@ -4,6 +4,9 @@ import {
|
|||
EVENT_TYPE_EXPIRED,
|
||||
EVENT_TYPE_JOINED,
|
||||
EVENT_TYPE_LEFT,
|
||||
EVENT_TYPE_PUSHED,
|
||||
PUSH_EVENT_REF_TYPE_BRANCH,
|
||||
PUSH_EVENT_REF_TYPE_TAG,
|
||||
} from '~/contribution_events/constants';
|
||||
|
||||
const findEventByAction = (action) => events.find((event) => event.action === action);
|
||||
|
|
@ -15,3 +18,29 @@ export const eventExpired = () => findEventByAction(EVENT_TYPE_EXPIRED);
|
|||
export const eventJoined = () => findEventByAction(EVENT_TYPE_JOINED);
|
||||
|
||||
export const eventLeft = () => findEventByAction(EVENT_TYPE_LEFT);
|
||||
|
||||
const findPushEvent = ({
|
||||
isNew = false,
|
||||
isRemoved = false,
|
||||
refType = PUSH_EVENT_REF_TYPE_BRANCH,
|
||||
commitCount = 1,
|
||||
} = {}) => () =>
|
||||
events.find(
|
||||
({ action, ref, commit }) =>
|
||||
action === EVENT_TYPE_PUSHED &&
|
||||
ref.is_new === isNew &&
|
||||
ref.is_removed === isRemoved &&
|
||||
ref.type === refType &&
|
||||
commit.count === commitCount,
|
||||
);
|
||||
|
||||
export const eventPushedNewBranch = findPushEvent({ isNew: true });
|
||||
export const eventPushedNewTag = findPushEvent({ isNew: true, refType: PUSH_EVENT_REF_TYPE_TAG });
|
||||
export const eventPushedBranch = findPushEvent();
|
||||
export const eventPushedTag = findPushEvent({ refType: PUSH_EVENT_REF_TYPE_TAG });
|
||||
export const eventPushedRemovedBranch = findPushEvent({ isRemoved: true });
|
||||
export const eventPushedRemovedTag = findPushEvent({
|
||||
isRemoved: true,
|
||||
refType: PUSH_EVENT_REF_TYPE_TAG,
|
||||
});
|
||||
export const eventBulkPushedBranch = findPushEvent({ commitCount: 5 });
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ import EnvironmentsBlock from '~/jobs/components/job/environments_block.vue';
|
|||
import ErasedBlock from '~/jobs/components/job/erased_block.vue';
|
||||
import JobApp from '~/jobs/components/job/job_app.vue';
|
||||
import JobLog from '~/jobs/components/log/log.vue';
|
||||
import JobLogTopBar from '~/jobs/components/job/job_log_controllers.vue';
|
||||
import JobLogTopBar from 'ee_else_ce/jobs/components/job/job_log_controllers.vue';
|
||||
import Sidebar from '~/jobs/components/job/sidebar/sidebar.vue';
|
||||
import StuckBlock from '~/jobs/components/job/stuck_block.vue';
|
||||
import UnmetPrerequisitesBlock from '~/jobs/components/job/unmet_prerequisites_block.vue';
|
||||
|
|
|
|||
|
|
@ -0,0 +1,39 @@
|
|||
import { GlLink } from '@gitlab/ui';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import MlModelsIndexApp from '~/ml/model_registry/routes/models/index';
|
||||
import { TITLE_LABEL } from '~/ml/model_registry/routes/models/index/translations';
|
||||
import { mockModels } from './mock_data';
|
||||
|
||||
let wrapper;
|
||||
const createWrapper = (models = mockModels) => {
|
||||
wrapper = shallowMountExtended(MlModelsIndexApp, {
|
||||
propsData: { models },
|
||||
});
|
||||
};
|
||||
|
||||
const findModelLink = (index) => wrapper.findAllComponents(GlLink).at(index);
|
||||
const modelLinkText = (index) => findModelLink(index).text();
|
||||
const modelLinkHref = (index) => findModelLink(index).attributes('href');
|
||||
const findTitle = () => wrapper.findByText(TITLE_LABEL);
|
||||
|
||||
describe('MlModelsIndex', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper();
|
||||
});
|
||||
|
||||
describe('header', () => {
|
||||
it('displays the title', () => {
|
||||
expect(findTitle().exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('model list', () => {
|
||||
it('displays the models', () => {
|
||||
expect(modelLinkHref(0)).toBe(mockModels[0].path);
|
||||
expect(modelLinkText(0)).toBe(`${mockModels[0].name} / ${mockModels[0].version}`);
|
||||
|
||||
expect(modelLinkHref(1)).toBe(mockModels[1].path);
|
||||
expect(modelLinkText(1)).toBe(`${mockModels[1].name} / ${mockModels[1].version}`);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
export const mockModels = [
|
||||
{
|
||||
name: 'model_1',
|
||||
version: '1.0',
|
||||
path: 'path/to/model_1',
|
||||
},
|
||||
{
|
||||
name: 'model_2',
|
||||
version: '1.0',
|
||||
path: 'path/to/model_2',
|
||||
},
|
||||
];
|
||||
|
|
@ -300,6 +300,7 @@ describe('AlertManagementEmptyState', () => {
|
|||
unique: true,
|
||||
symbol: '@',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
operators: OPERATORS_IS,
|
||||
fetchPath: '/link',
|
||||
fetchUsers: expect.any(Function),
|
||||
|
|
@ -311,6 +312,7 @@ describe('AlertManagementEmptyState', () => {
|
|||
unique: true,
|
||||
symbol: '@',
|
||||
token: UserToken,
|
||||
dataType: 'user',
|
||||
operators: OPERATORS_IS,
|
||||
fetchPath: '/link',
|
||||
fetchUsers: expect.any(Function),
|
||||
|
|
|
|||