Add latest changes from gitlab-org/gitlab@master

parent 778ea71394
commit ce34395e91
@@ -3,6 +3,7 @@ import '~/behaviors/markdown/render_gfm';
import Flash from '../../flash';
import { handleLocationHash } from '../../lib/utils/common_utils';
import axios from '../../lib/utils/axios_utils';
import eventHub from '../../notes/event_hub';
import { __ } from '~/locale';

const loadRichBlobViewer = type => {
@@ -178,6 +179,10 @@ export default class BlobViewer {
viewer.innerHTML = data.html;
viewer.setAttribute('data-loaded', 'true');

if (window.gon?.features?.codeNavigation) {
eventHub.$emit('showBlobInteractionZones', viewer.dataset.path);
}

return viewer;
});
}
@ -31,6 +31,9 @@ export default {
|
|||
};
|
||||
},
|
||||
computed: {
|
||||
isCurrentDefinition() {
|
||||
return this.data.definitionLineNumber - 1 === this.position.lineIndex;
|
||||
},
|
||||
positionStyles() {
|
||||
return {
|
||||
left: `${this.position.x - this.offsetLeft}px`,
|
||||
|
|
@ -43,7 +46,7 @@ export default {
|
|||
}
|
||||
|
||||
if (this.isDefinitionCurrentBlob) {
|
||||
return `#${this.data.definition_path.split('#').pop()}`;
|
||||
return `#L${this.data.definitionLineNumber}`;
|
||||
}
|
||||
|
||||
return `${this.definitionPathPrefix}/${this.data.definition_path}`;
|
||||
|
|
@ -79,19 +82,29 @@ export default {
|
|||
class="popover code-navigation-popover popover-font-size-normal gl-popover bs-popover-bottom show"
|
||||
>
|
||||
<div :style="{ left: `${offsetLeft}px` }" class="arrow"></div>
|
||||
<div v-for="(hover, index) in data.hover" :key="index" class="border-bottom">
|
||||
<pre
|
||||
v-if="hover.language"
|
||||
ref="code-output"
|
||||
:class="$options.colorScheme"
|
||||
class="border-0 bg-transparent m-0 code highlight"
|
||||
><doc-line v-for="(tokens, tokenIndex) in hover.tokens" :key="tokenIndex" :language="hover.language" :tokens="tokens"/></pre>
|
||||
<p v-else ref="doc-output" class="p-3 m-0">
|
||||
{{ hover.value }}
|
||||
</p>
|
||||
<div class="overflow-auto code-navigation-popover-container">
|
||||
<div
|
||||
v-for="(hover, index) in data.hover"
|
||||
:key="index"
|
||||
:class="{ 'border-bottom': index !== data.hover.length - 1 }"
|
||||
>
|
||||
<pre
|
||||
v-if="hover.language"
|
||||
ref="code-output"
|
||||
:class="$options.colorScheme"
|
||||
class="border-0 bg-transparent m-0 code highlight text-wrap"
|
||||
><doc-line v-for="(tokens, tokenIndex) in hover.tokens" :key="tokenIndex" :language="hover.language" :tokens="tokens"/></pre>
|
||||
<p v-else ref="doc-output" class="p-3 m-0 gl-font-base">
|
||||
{{ hover.value }}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="definitionPath" class="popover-body">
|
||||
<div v-if="definitionPath || isCurrentDefinition" class="popover-body border-top">
|
||||
<span v-if="isCurrentDefinition" class="gl-font-weight-bold gl-font-base">
|
||||
{{ s__('CodeIntelligence|This is the definition') }}
|
||||
</span>
|
||||
<gl-button
|
||||
v-else
|
||||
:href="definitionPath"
|
||||
:target="isDefinitionCurrentBlob ? null : '_blank'"
|
||||
class="w-100"
|
||||
|
|
|
|||
|
|
@ -18,7 +18,10 @@ export default {
|
|||
.then(({ data }) => {
|
||||
const normalizedData = data.reduce((acc, d) => {
|
||||
if (d.hover) {
|
||||
acc[`${d.start_line}:${d.start_char}`] = d;
|
||||
acc[`${d.start_line}:${d.start_char}`] = {
|
||||
...d,
|
||||
definitionLineNumber: parseInt(d.definition_path?.split('#L').pop() || 0, 10),
|
||||
};
|
||||
addInteractionClass(path, d);
|
||||
}
|
||||
return acc;
|
||||
|
|
@ -67,6 +70,7 @@ export default {
|
|||
x: x || 0,
|
||||
y: y + window.scrollY || 0,
|
||||
height: el.offsetHeight,
|
||||
lineIndex: parseInt(lineIndex, 10),
|
||||
};
|
||||
definition = data[`${lineIndex}:${charIndex}`];
|
||||
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ export const addInteractionClass = (path, d) => {
|
|||
el.setAttribute('data-char-index', d.start_char);
|
||||
el.setAttribute('data-line-index', d.start_line);
|
||||
el.classList.add('cursor-pointer', 'code-navigation', 'js-code-navigation');
|
||||
el.closest('.line').classList.add('code-navigation-line');
|
||||
}
|
||||
});
|
||||
};
|
||||
|
|
|
|||
|
|
@@ -89,13 +89,15 @@ export const getDayName = date =>
 * @example
 * dateFormat('2017-12-05','mmm d, yyyy h:MMtt Z' ) -> "Dec 5, 2017 12:00am GMT+0000"
 * @param {date} datetime
 * @param {String} format
 * @param {Boolean} UTC convert local time to UTC
 * @returns {String}
 */
export const formatDate = (datetime, format = 'mmm d, yyyy h:MMtt Z') => {
export const formatDate = (datetime, format = 'mmm d, yyyy h:MMtt Z', utc = false) => {
if (isString(datetime) && datetime.match(/\d+-\d+\d+ /)) {
throw new Error(__('Invalid date'));
}
return dateFormat(datetime, format);
return dateFormat(datetime, format, utc);
};

/**
|
|
|
|||
|
|
@ -500,16 +500,27 @@ span.idiff {
|
|||
border: transparent;
|
||||
}
|
||||
|
||||
.code-navigation {
|
||||
border-bottom: 1px $gray-darkest dashed;
|
||||
.code-navigation-line:hover {
|
||||
.code-navigation {
|
||||
border-bottom: 1px $gray-darkest dashed;
|
||||
|
||||
&:hover {
|
||||
border-bottom-color: $almost-black;
|
||||
&:hover {
|
||||
border-bottom-color: $almost-black;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.code-navigation-popover {
|
||||
max-width: 450px;
|
||||
.code-navigation-popover.popover {
|
||||
max-width: calc(min(#{px-to-rem(560px)}, calc(100vw - #{$gl-padding-32})));
|
||||
}
|
||||
|
||||
.code-navigation-popover-container {
|
||||
max-height: px-to-rem(320px);
|
||||
}
|
||||
|
||||
.code-navigation-popover .code {
|
||||
padding-left: $grid-size * 3;
|
||||
text-indent: -$grid-size * 2;
|
||||
}
|
||||
|
||||
.tree-item-link {
|
||||
|
|
|
|||
|
|
@@ -14,7 +14,7 @@ class Projects::Ci::LintsController < Projects::ApplicationController
@errors = result.errors

if result.valid?
@config_processor = result.content
@config_processor = result.config
@stages = @config_processor.stages
@builds = @config_processor.builds
@jobs = @config_processor.jobs
@@ -55,8 +55,12 @@ module AlertManagement
validates :severity, presence: true
validates :status, presence: true
validates :started_at, presence: true
validates :fingerprint, uniqueness: { scope: :project }, allow_blank: true
validate :hosts_length
validates :fingerprint, allow_blank: true, uniqueness: {
scope: :project,
conditions: -> { where.not(status: STATUSES[:resolved]) },
message: -> (object, data) { _('Cannot have multiple unresolved alerts') }
}, unless: :resolved?
validate :hosts_length

enum severity: {
critical: 0,
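The hunk above swaps the unconditional fingerprint uniqueness check for one that only counts unresolved alerts. A minimal sketch of the intended behaviour; the `project` and the pre-existing unresolved alert are assumptions, not part of the commit:

```ruby
# Sketch only: a fingerprint may repeat across alerts in a project as long as
# every earlier alert carrying it has already been resolved.
duplicate = AlertManagement::Alert.new(
  project: project,
  title: 'Service is down',
  started_at: Time.current,
  fingerprint: 'abc123' # same fingerprint as an existing *unresolved* alert
)

duplicate.valid?
# => false, with the "Cannot have multiple unresolved alerts" message

# Once the earlier alert is resolved, the partial uniqueness check skips it,
# so a new alert with the same fingerprint becomes valid again.
```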
|
|
|
|||
|
|
@ -8,7 +8,7 @@ module BlobViewer
|
|||
self.partial_name = 'image'
|
||||
self.extensions = UploaderHelper::SAFE_IMAGE_EXT
|
||||
self.binary = true
|
||||
self.switcher_icon = 'picture-o'
|
||||
self.switcher_icon = 'doc-image'
|
||||
self.switcher_title = 'image'
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ module BlobViewer
|
|||
self.partial_name = 'notebook'
|
||||
self.extensions = %w(ipynb)
|
||||
self.binary = false
|
||||
self.switcher_icon = 'file-text-o'
|
||||
self.switcher_icon = 'doc-text'
|
||||
self.switcher_title = 'notebook'
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,8 +8,6 @@ module BlobViewer
|
|||
self.partial_name = 'openapi'
|
||||
self.file_types = %i(openapi)
|
||||
self.binary = false
|
||||
# TODO: get an icon for OpenAPI
|
||||
self.switcher_icon = 'file-pdf-o'
|
||||
self.switcher_title = 'OpenAPI'
|
||||
self.switcher_icon = 'api'
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ module BlobViewer
|
|||
|
||||
included do
|
||||
self.type = :rich
|
||||
self.switcher_icon = 'file-text-o'
|
||||
self.switcher_icon = 'doc-text'
|
||||
self.switcher_title = 'rendered file'
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ module BlobViewer
|
|||
self.partial_name = 'svg'
|
||||
self.extensions = %w(svg)
|
||||
self.binary = false
|
||||
self.switcher_icon = 'picture-o'
|
||||
self.switcher_icon = 'doc-image'
|
||||
self.switcher_title = 'image'
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -641,9 +641,22 @@ module Ci
end

def add_error_message(content)
return unless Gitlab::Ci::Features.store_pipeline_messages?(project)
add_message(:error, content)
end

messages.error.build(content: content)
def add_warning_message(content)
add_message(:warning, content)
end

# We can't use `messages.error` scope here because messages should also be
# read when the pipeline is not persisted. Using the scope will return no
# results as it would query persisted data.
def error_messages
messages.select(&:error?)
end

def warning_messages
messages.select(&:warning?)
end

# Manually set the notes for a Ci::Pipeline
@@ -1019,6 +1032,12 @@ module Ci

private

def add_message(severity, content)
return unless Gitlab::Ci::Features.store_pipeline_messages?(project)

messages.build(severity: severity, content: content)
end

def pipeline_data
Gitlab::DataBuilder::Pipeline.build(self)
end
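A short sketch of how these message helpers are meant to be read back; the pipeline object and the `store_pipeline_messages` feature-flag state are assumed rather than taken from the commit:

```ruby
# Illustration only. Messages are built in memory, not persisted, which is why
# the in-memory selectors are used instead of the persisted `messages.error` scope.
pipeline = Ci::Pipeline.new(project: project)

pipeline.add_warning_message('jobs:rspec uses `rules` without defining `workflow:rules`')
pipeline.add_error_message('Invalid configuration format')

pipeline.warning_messages.map(&:content)
# => ["jobs:rspec uses `rules` without defining `workflow:rules`"]
pipeline.error_messages.map(&:content)
# => ["Invalid configuration format"]
```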
|
|
|
|||
|
|
@ -8,7 +8,7 @@ module DiffViewer
|
|||
self.partial_name = 'image'
|
||||
self.extensions = UploaderHelper::SAFE_IMAGE_EXT
|
||||
self.binary = true
|
||||
self.switcher_icon = 'picture-o'
|
||||
self.switcher_icon = 'doc-image'
|
||||
self.switcher_title = _('image diff')
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -9,6 +9,8 @@ class UserDetail < ApplicationRecord
validates :job_title, length: { maximum: 200 }
validates :bio, length: { maximum: 255 }, allow_blank: true

before_save :prevent_nil_bio

cache_markdown_field :bio

def bio_html
@@ -22,4 +24,10 @@ class UserDetail < ApplicationRecord
def invalidated_markdown_cache?
self.class.column_names.include?('bio_html') && super
end

private

def prevent_nil_bio
self.bio = '' if bio_changed? && bio.nil?
end
end
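A minimal sketch of what the `before_save` hook above changes; the `user` record is assumed:

```ruby
# Illustration only: an explicit nil (for example, from an API payload) is
# coerced back to an empty string before the record is saved.
detail = user.user_detail
detail.update!(bio: 'Hello')

detail.bio = nil
detail.save!
detail.bio # => ""
```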
|
|
|
|||
|
|
@@ -13,6 +13,7 @@ module Jira
@search = params[:search]
@labels = params[:labels]
@status = params[:status]
@state = params[:state]
@reporter = params[:author_username]
@assignee = params[:assignee_username]
@sort = params[:sort] || DEFAULT_SORT
@@ -28,7 +29,7 @@ module Jira

private

attr_reader :jira_project_key, :sort, :sort_direction, :search, :labels, :status, :reporter, :assignee
attr_reader :jira_project_key, :sort, :sort_direction, :search, :labels, :status, :reporter, :assignee, :state

def jql_filters
[
@@ -37,6 +38,7 @@ module Jira
by_status,
by_reporter,
by_assignee,
by_open_and_closed,
by_summary_and_description
].compact.join(' AND ')
end
@@ -80,6 +82,17 @@ module Jira
%Q[assignee = "#{escape_quotes(assignee)}"]
end

def by_open_and_closed
return if state.blank?

case state
when 'opened'
%q[statusCategory != Done]
when 'closed'
%q[statusCategory = Done]
end
end

def escape_quotes(param)
param.gsub('\\', '\\\\\\').gsub('"', '\\"')
end
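For context, a rough sketch of the JQL the combined filters would produce for `state: 'opened'`; only `by_open_and_closed` comes from the hunk above, the other filter values are invented:

```ruby
# Hypothetical filter output, joined the same way jql_filters does.
filters = [
  %q[project = "DEMO"],       # by_project (assumed value)
  %q[statusCategory != Done], # by_open_and_closed for state: 'opened'
  %q[summary ~ "crash"]       # by_summary_and_description (assumed value)
]

filters.compact.join(' AND ')
# => 'project = "DEMO" AND statusCategory != Done AND summary ~ "crash"'
```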
|
|
|
|||
|
|
@ -15,14 +15,18 @@ module Labels
|
|||
def execute
|
||||
return unless old_group.present?
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
link_ids = group_labels_applied_to_issues.pluck("label_links.id") +
|
||||
group_labels_applied_to_merge_requests.pluck("label_links.id")
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
|
||||
Label.transaction do
|
||||
labels_to_transfer.find_each do |label|
|
||||
new_label_id = find_or_create_label!(label)
|
||||
|
||||
next if new_label_id == label.id
|
||||
|
||||
update_label_links(group_labels_applied_to_issues, old_label_id: label.id, new_label_id: new_label_id)
|
||||
update_label_links(group_labels_applied_to_merge_requests, old_label_id: label.id, new_label_id: new_label_id)
|
||||
update_label_links(link_ids, old_label_id: label.id, new_label_id: new_label_id)
|
||||
update_label_priorities(old_label_id: label.id, new_label_id: new_label_id)
|
||||
end
|
||||
end
|
||||
|
|
@ -46,7 +50,7 @@ module Labels
|
|||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def group_labels_applied_to_issues
|
||||
Label.joins(:issues)
|
||||
@group_labels_applied_to_issues ||= Label.joins(:issues)
|
||||
.where(
|
||||
issues: { project_id: project.id },
|
||||
labels: { type: 'GroupLabel', group_id: old_group.self_and_ancestors }
|
||||
|
|
@ -56,7 +60,7 @@ module Labels
|
|||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def group_labels_applied_to_merge_requests
|
||||
Label.joins(:merge_requests)
|
||||
@group_labels_applied_to_merge_requests ||= Label.joins(:merge_requests)
|
||||
.where(
|
||||
merge_requests: { target_project_id: project.id },
|
||||
labels: { type: 'GroupLabel', group_id: old_group.self_and_ancestors }
|
||||
|
|
@ -72,14 +76,7 @@ module Labels
|
|||
end
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def update_label_links(labels, old_label_id:, new_label_id:)
|
||||
# use 'labels' relation to get label_link ids only of issues/MRs
|
||||
# in the project being transferred.
|
||||
# IDs are fetched in a separate query because MySQL doesn't
|
||||
# allow referring of 'label_links' table in UPDATE query:
|
||||
# https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/62435068
|
||||
link_ids = labels.pluck('label_links.id')
|
||||
|
||||
def update_label_links(link_ids, old_label_id:, new_label_id:)
|
||||
LabelLink.where(id: link_ids, label_id: old_label_id)
|
||||
.update_all(label_id: new_label_id)
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -69,7 +69,7 @@ module Members
def enqueue_unassign_issuables(member)
source_type = member.is_a?(GroupMember) ? 'Group' : 'Project'

member.run_after_commit do
member.run_after_commit_or_now do
MembersDestroyer::UnassignIssuablesWorker.perform_async(member.user_id, member.source_id, source_type)
end
end
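The one-line change matters because `run_after_commit` only fires when the surrounding database transaction commits; when no transaction is open, the block would otherwise be dropped. A simplified sketch of the semantics being relied on, not GitLab's actual `AfterCommitQueue` implementation:

```ruby
# Simplified illustration of run_after_commit_or_now.
def run_after_commit_or_now(&block)
  if connection.transaction_open?
    run_after_commit(&block) # defer until the transaction commits
  else
    block.call               # no transaction: run immediately instead of never
  end
end
```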
|
|
|
|||
|
|
@ -5,7 +5,7 @@
|
|||
- external_embed = local_assigns.fetch(:external_embed, false)
|
||||
|
||||
- viewer_url = local_assigns.fetch(:viewer_url) { url_for(safe_params.merge(viewer: viewer.type, format: :json)) } if load_async
|
||||
.blob-viewer{ data: { type: viewer.type, rich_type: rich_type, url: viewer_url }, class: ('hidden' if hidden) }
|
||||
.blob-viewer{ data: { type: viewer.type, rich_type: rich_type, url: viewer_url, path: viewer.blob.path }, class: ('hidden' if hidden) }
|
||||
- if render_error
|
||||
= render 'projects/blob/render_error', viewer: viewer
|
||||
- elsif load_async
|
||||
|
|
|
|||
|
|
@ -5,8 +5,8 @@
|
|||
.btn-group.js-blob-viewer-switcher.ml-2{ role: "group" }>
|
||||
- simple_label = "Display #{simple_viewer.switcher_title}"
|
||||
%button.btn.btn-default.btn-sm.js-blob-viewer-switch-btn.has-tooltip{ 'aria-label' => simple_label, title: simple_label, data: { viewer: 'simple', container: 'body' } }>
|
||||
= icon(simple_viewer.switcher_icon)
|
||||
= sprite_icon(simple_viewer.switcher_icon)
|
||||
|
||||
- rich_label = "Display #{rich_viewer.switcher_title}"
|
||||
%button.btn.btn-default.btn-sm.js-blob-viewer-switch-btn.has-tooltip{ 'aria-label' => rich_label, title: rich_label, data: { viewer: 'rich', container: 'body' } }>
|
||||
= icon(rich_viewer.switcher_icon)
|
||||
= sprite_icon(rich_viewer.switcher_icon)
|
||||
|
|
|
|||
File diff suppressed because it is too large
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class UpdateContainerRegistryInfoWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext

feature_category :container_registry
urgency :low

idempotent!

def perform
UpdateContainerRegistryInfoService.new.execute
end
end
@@ -0,0 +1,5 @@
---
title: Periodically update container registry type settings
merge_request: 36415
author:
type: added
@@ -0,0 +1,5 @@
---
title: Drop index of ruby objects in details on audit_events table
merge_request: 36547
author:
type: other
@@ -0,0 +1,5 @@
---
title: Change Alert fingerprint index to run when status is not resolved
merge_request: 36024
author:
type: changed
@@ -0,0 +1,5 @@
---
title: Fix API errors when null value is given for the bio
merge_request: 36650
author:
type: fixed
@@ -0,0 +1,5 @@
---
title: Use new icon for api preview
merge_request: 34700
author: Roger Meier
type: added
@@ -0,0 +1,5 @@
---
title: Improved UX of the code navigation popover
merge_request:
author:
type: changed
@@ -0,0 +1,5 @@
---
title: Ensure to run unassign issuables worker when not in a transaction
merge_request: 36680
author:
type: fixed
@@ -500,6 +500,9 @@ Settings.cron_jobs['users_create_statistics_worker']['job_class'] = 'Users::Crea
Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker']['cron'] ||= '45 1 * * 6'
Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker']['job_class'] = 'AuthorizedProjectUpdate::PeriodicRecalculateWorker'
Settings.cron_jobs['update_container_registry_info_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['update_container_registry_info_worker']['cron'] ||= '0 0 * * *'
Settings.cron_jobs['update_container_registry_info_worker']['job_class'] = 'UpdateContainerRegistryInfoWorker'

Gitlab.ee do
Settings.cron_jobs['adjourned_group_deletion_worker'] ||= Settingslogic.new({})
|
|
|
|||
|
|
@ -0,0 +1,35 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AdjustUniqueIndexAlertManagementAlerts < ActiveRecord::Migration[6.0]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
DOWNTIME = false
|
||||
INDEX_NAME = 'index_alert_management_alerts_on_project_id_and_fingerprint'
|
||||
NEW_INDEX_NAME = 'index_partial_am_alerts_on_project_id_and_fingerprint'
|
||||
RESOLVED_STATUS = 2
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
add_concurrent_index(:alert_management_alerts, %w(project_id fingerprint), where: "status <> #{RESOLVED_STATUS}", name: NEW_INDEX_NAME, unique: true, using: :btree)
|
||||
remove_concurrent_index_by_name :alert_management_alerts, INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
# Nullify duplicate fingerprints, except for the newest of each match (project_id, fingerprint).
|
||||
query = <<-SQL
|
||||
UPDATE alert_management_alerts am
|
||||
SET fingerprint = NULL
|
||||
WHERE am.created_at <>
|
||||
(SELECT MAX(created_at)
|
||||
FROM alert_management_alerts am2
|
||||
WHERE am.fingerprint = am2.fingerprint AND am.project_id = am2.project_id)
|
||||
AND am.fingerprint IS NOT NULL;
|
||||
SQL
|
||||
|
||||
execute(query)
|
||||
|
||||
remove_concurrent_index_by_name :alert_management_alerts, NEW_INDEX_NAME
|
||||
add_concurrent_index(:alert_management_alerts, %w(project_id fingerprint), name: INDEX_NAME, unique: true, using: :btree)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class DropIndexRubyObjectsInDetailsOnAuditEvents < ActiveRecord::Migration[6.0]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
DOWNTIME = false
|
||||
INDEX_NAME = 'index_audit_events_on_ruby_object_in_details'
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
remove_concurrent_index_by_name(:audit_events, INDEX_NAME)
|
||||
end
|
||||
|
||||
def down
|
||||
add_concurrent_index(:audit_events, :id, where: "details ~~ '%ruby/object%'", name: INDEX_NAME)
|
||||
end
|
||||
end
|
||||
|
|
@ -18505,8 +18505,6 @@ CREATE INDEX index_alert_management_alerts_on_environment_id ON public.alert_man
|
|||
|
||||
CREATE INDEX index_alert_management_alerts_on_issue_id ON public.alert_management_alerts USING btree (issue_id);
|
||||
|
||||
CREATE UNIQUE INDEX index_alert_management_alerts_on_project_id_and_fingerprint ON public.alert_management_alerts USING btree (project_id, fingerprint);
|
||||
|
||||
CREATE UNIQUE INDEX index_alert_management_alerts_on_project_id_and_iid ON public.alert_management_alerts USING btree (project_id, iid);
|
||||
|
||||
CREATE INDEX index_alert_management_alerts_on_prometheus_alert_id ON public.alert_management_alerts USING btree (prometheus_alert_id) WHERE (prometheus_alert_id IS NOT NULL);
|
||||
|
|
@ -18609,8 +18607,6 @@ CREATE INDEX index_approvers_on_user_id ON public.approvers USING btree (user_id
|
|||
|
||||
CREATE INDEX index_audit_events_on_entity_id_entity_type_id_desc_author_id ON public.audit_events USING btree (entity_id, entity_type, id DESC, author_id);
|
||||
|
||||
CREATE INDEX index_audit_events_on_ruby_object_in_details ON public.audit_events USING btree (id) WHERE (details ~~ '%ruby/object%'::text);
|
||||
|
||||
CREATE INDEX index_award_emoji_on_awardable_type_and_awardable_id ON public.award_emoji USING btree (awardable_type, awardable_id);
|
||||
|
||||
CREATE INDEX index_award_emoji_on_user_id_and_name ON public.award_emoji USING btree (user_id, name);
|
||||
|
|
@ -19755,6 +19751,8 @@ CREATE INDEX index_pages_domains_on_verified_at_and_enabled_until ON public.page
|
|||
|
||||
CREATE INDEX index_pages_domains_on_wildcard ON public.pages_domains USING btree (wildcard);
|
||||
|
||||
CREATE UNIQUE INDEX index_partial_am_alerts_on_project_id_and_fingerprint ON public.alert_management_alerts USING btree (project_id, fingerprint) WHERE (status <> 2);
|
||||
|
||||
CREATE UNIQUE INDEX index_partitioned_foreign_keys_unique_index ON public.partitioned_foreign_keys USING btree (to_table, from_table, from_column);
|
||||
|
||||
CREATE INDEX index_pat_on_user_id_and_expires_at ON public.personal_access_tokens USING btree (user_id, expires_at);
|
||||
|
|
@ -23641,10 +23639,12 @@ COPY "schema_migrations" (version) FROM STDIN;
|
|||
20200704143633
|
||||
20200704161600
|
||||
20200706005325
|
||||
20200706035141
|
||||
20200706154619
|
||||
20200706170536
|
||||
20200707071941
|
||||
20200707094341
|
||||
20200707095849
|
||||
20200710102846
|
||||
\.
|
||||
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,93 @@
---
stage: Enablement
group: Geo
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: howto
---

# Disabling Geo **(PREMIUM ONLY)**

If you want to revert to a regular Omnibus setup after a test, or you have encountered a Disaster Recovery
situation and you want to disable Geo momentarily, you can use these instructions to disable your
Geo setup.

There should be no functional difference between disabling Geo and having an active Geo setup with
no secondary Geo nodes if you remove them correctly.

To disable Geo, follow these steps:

1. [Remove all secondary Geo nodes](#remove-all-secondary-geo-nodes).
1. [Remove the primary node from the UI](#remove-the-primary-node-from-the-ui).
1. [Remove secondary replication slots](#remove-secondary-replication-slots).
1. [Remove Geo-related configuration](#remove-geo-related-configuration).
1. [(Optional) Revert PostgreSQL settings to use a password and listen on an IP](#optional-revert-postgresql-settings-to-use-a-password-and-listen-on-an-ip).

## Remove all secondary Geo nodes

To disable Geo, you need to first remove all your secondary Geo nodes, which means replication will not happen
anymore on these nodes. You can follow our docs to [remove your secondary Geo nodes](./remove_geo_node.md).

If the current node that you want to keep using is a secondary node, you need to first promote it to primary.
You can use our steps on [how to promote a secondary node](../disaster_recovery/#step-3-promoting-a-secondary-node)
in order to do that.

## Remove the primary node from the UI

1. Go to **{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`).
1. Click the **Remove** button for the **primary** node.
1. Confirm by clicking **Remove** when the prompt appears.

## Remove secondary replication slots

To remove secondary replication slots, run one of the following queries on your primary
Geo node in a PostgreSQL console (`sudo gitlab-psql`):

- If you already have a PostgreSQL cluster, drop individual replication slots by name to prevent
  removing your secondary databases from the same cluster. You can use the following to get
  all names and then drop each individual slot:

  ```sql
  SELECT slot_name, slot_type, active FROM pg_replication_slots; -- view present replication slots
  SELECT pg_drop_replication_slot('slot_name'); -- where slot_name is the one expected from above
  ```

- To remove all secondary replication slots:

  ```sql
  SELECT pg_drop_replication_slot(slot_name) FROM pg_replication_slots;
  ```

## Remove Geo-related configuration

1. SSH into your primary Geo node and log in as root:

   ```shell
   sudo -i
   ```

1. Edit `/etc/gitlab/gitlab.rb` and remove the Geo related configuration by
   removing any lines that enabled `geo_primary_role`:

   ```ruby
   ## In pre-11.5 documentation, the role was enabled as follows. Remove this line.
   geo_primary_role['enable'] = true

   ## In 11.5+ documentation, the role was enabled as follows. Remove this line.
   roles ['geo_primary_role']
   ```

1. After making these changes, [reconfigure GitLab](../../restart_gitlab.md#omnibus-gitlab-reconfigure)
   for the changes to take effect.

## (Optional) Revert PostgreSQL settings to use a password and listen on an IP

If you want to remove the PostgreSQL-specific settings and revert
to the defaults (using a socket instead), you can safely remove the following
lines from the `/etc/gitlab/gitlab.rb` file:

```ruby
postgresql['sql_user_password'] = '...'
gitlab_rails['db_password'] = '...'
postgresql['listen_address'] = '...'
postgresql['md5_auth_cidr_addresses'] = ['...', '...']
```
|
@@ -265,6 +265,10 @@ For an example of how to set up a location-aware Git remote URL with AWS Route53

For more information on removing a Geo node, see [Removing **secondary** Geo nodes](remove_geo_node.md).

## Disable Geo

To find out how to disable Geo, see [Disabling Geo](disable_geo.md).

## Current limitations

CAUTION: **Caution:**
|
|
|
|||
|
|
@@ -264,6 +264,7 @@ instance (`cache`, `shared_state` etc.).
|:--------------------------------- |:------- |:----- |:----------- |
| `gitlab_redis_client_exceptions_total` | Counter | 13.2 | Number of Redis client exceptions, broken down by exception class |
| `gitlab_redis_client_requests_total` | Counter | 13.2 | Number of Redis client requests |
| `gitlab_redis_client_requests_duration_seconds` | Histogram | 13.2 | Redis request latency, excluding blocking commands |
|
||||
## Metrics shared directory
|
||||
|
||||
|
|
|
|||
|
|
@ -89,8 +89,8 @@ GET /users
|
|||
"web_url": "http://localhost:3000/john_smith",
|
||||
"created_at": "2012-05-23T08:00:58Z",
|
||||
"is_admin": false,
|
||||
"bio": null,
|
||||
"bio_html": null,
|
||||
"bio": "",
|
||||
"bio_html": "",
|
||||
"location": null,
|
||||
"skype": "",
|
||||
"linkedin": "",
|
||||
|
|
@ -129,8 +129,8 @@ GET /users
|
|||
"web_url": "http://localhost:3000/jack_smith",
|
||||
"created_at": "2012-05-23T08:01:01Z",
|
||||
"is_admin": false,
|
||||
"bio": null,
|
||||
"bio_html": null,
|
||||
"bio": "",
|
||||
"bio_html": "",
|
||||
"location": null,
|
||||
"skype": "",
|
||||
"linkedin": "",
|
||||
|
|
@ -247,8 +247,8 @@ Parameters:
|
|||
"avatar_url": "http://localhost:3000/uploads/user/avatar/1/cd8.jpeg",
|
||||
"web_url": "http://localhost:3000/john_smith",
|
||||
"created_at": "2012-05-23T08:00:58Z",
|
||||
"bio": null,
|
||||
"bio_html": null,
|
||||
"bio": "",
|
||||
"bio_html": "",
|
||||
"location": null,
|
||||
"public_email": "john@example.com",
|
||||
"skype": "",
|
||||
|
|
@ -283,8 +283,8 @@ Example Responses:
|
|||
"web_url": "http://localhost:3000/john_smith",
|
||||
"created_at": "2012-05-23T08:00:58Z",
|
||||
"is_admin": false,
|
||||
"bio": null,
|
||||
"bio_html": null,
|
||||
"bio": "",
|
||||
"bio_html": "",
|
||||
"location": null,
|
||||
"public_email": "john@example.com",
|
||||
"skype": "",
|
||||
|
|
@ -372,6 +372,9 @@ over `password`. In addition, `reset_password` and
|
|||
NOTE: **Note:**
|
||||
From [GitLab 12.1](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/29888/), `private_profile` will default to `false`.
|
||||
|
||||
NOTE: **Note:**
|
||||
From [GitLab 13.2](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/35604), `bio` will default to `""` instead of `null`.
|
||||
|
||||
```plaintext
|
||||
POST /users
|
||||
```
|
||||
|
|
@ -503,8 +506,8 @@ GET /user
|
|||
"avatar_url": "http://localhost:3000/uploads/user/avatar/1/index.jpg",
|
||||
"web_url": "http://localhost:3000/john_smith",
|
||||
"created_at": "2012-05-23T08:00:58Z",
|
||||
"bio": null,
|
||||
"bio_html": null,
|
||||
"bio": "",
|
||||
"bio_html": "",
|
||||
"location": null,
|
||||
"public_email": "john@example.com",
|
||||
"skype": "",
|
||||
|
|
@ -553,8 +556,8 @@ GET /user
|
|||
"web_url": "http://localhost:3000/john_smith",
|
||||
"created_at": "2012-05-23T08:00:58Z",
|
||||
"is_admin": false,
|
||||
"bio": null,
|
||||
"bio_html": null,
|
||||
"bio": "",
|
||||
"bio_html": "",
|
||||
"location": null,
|
||||
"public_email": "john@example.com",
|
||||
"skype": "",
|
||||
|
|
|
|||
|
|
@@ -15,6 +15,12 @@ To use reCAPTCHA, first you must create a site and private key.
1. Fill all reCAPTCHA fields with keys from previous steps.
1. Check the `Enable reCAPTCHA` checkbox.
1. Save the configuration.
1. Change the first line of the `#execute` method in `app/services/spam/spam_verdict_service.rb`
   to `return CONDITONAL_ALLOW` so that the spam check short-circuits and triggers the response to
   return `recaptcha_html`.

NOTE: **Note:**
Make sure you are viewing an issuable in a project that is public, and if you're working with an issue, the issue is public.

## Enabling reCAPTCHA for user logins via passwords
|
||||
|
|
|
|||
|
|
@@ -184,6 +184,33 @@ of a group:
1. Give a different member **Owner** permissions.
1. Have the new owner sign in and remove **Owner** permissions from you.

## Remove a member from the group

Only users with [Owner](../permissions.md#group-members-permissions) permissions can manage
group members.

You can remove a member from the group if the given member has a direct membership in the group. If
membership is inherited from a parent group, then the member can be removed only from the parent
group itself.

When removing a member, you can decide whether to unassign the user from all issues and merge
requests they are currently assigned to, or leave the assignments as they are.

- **Unassigning the removed member** from all issues and merge requests might be helpful when a user
  is leaving a private group and you wish to revoke their access to any issues and merge requests
  they are assigned to.
- **Keeping the issues and merge requests assigned** might be helpful for groups that accept public
  contributions, where a user doesn't have to be a member to be able to contribute to issues and
  merge requests.

To remove a member from a group:

1. In a group, go to **{users}** **Members**.
1. Click the **Delete** **{remove}** button next to a group member you want to remove.
   A **Remove member** modal appears.
1. (Optional) Select the **Also unassign this user from related issues and merge requests** checkbox.
1. Click **Remove member**.

## Changing the default branch protection of a group

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/7583) in GitLab 12.9.
|
|
|
|||
|
|
@ -128,3 +128,30 @@ If you change your mind before your request is approved, just click the
|
|||
## Share project with group
|
||||
|
||||
Alternatively, you can [share a project with an entire group](share_project_with_groups.md) instead of adding users one by one.
|
||||
|
||||
## Remove a member from the project
|
||||
|
||||
Only users with permissions of [Owner](../../permissions.md#group-members-permissions) can manage
|
||||
project members.
|
||||
|
||||
You can remove a user from the project if the given member has a direct membership in the project.
|
||||
If membership is inherited from a parent group, then the member can be removed only from the parent
|
||||
group itself.
|
||||
|
||||
When removing a member, you can decide whether to unassign the user from all issues and merge
|
||||
requests they are currently assigned or leave the assignments as they are.
|
||||
|
||||
- **Unassigning the removed member** from all issues and merge requests might be helpful when a user
|
||||
is leaving a private project and you wish to revoke their access to any issues and merge requests
|
||||
they are assigned.
|
||||
- **Keeping the issues and merge requests assigned** might be helpful for projects that accept public
|
||||
contributions where a user doesn't have to be a member to be able to contribute to issues and
|
||||
merge requests.
|
||||
|
||||
To remove a member from a project:
|
||||
|
||||
1. In a project, go to **{users}** **Members**.
|
||||
1. Click the **Delete** **{remove}** button next to a project member you want to remove.
|
||||
A **Remove member** modal appears.
|
||||
1. (Optional) Select the **Also unassign this user from related issues and merge requests** checkbox.
|
||||
1. Click **Remove member**.
|
||||
|
|
|
|||
|
|
@ -39,6 +39,10 @@ module Gitlab
|
|||
@root.errors
|
||||
end
|
||||
|
||||
def warnings
|
||||
@root.warnings
|
||||
end
|
||||
|
||||
def to_hash
|
||||
@config
|
||||
end
|
||||
|
|
|
|||
|
|
@ -82,6 +82,10 @@ module Gitlab
|
|||
@entries.delete(:except) unless except_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
end
|
||||
|
||||
if has_rules? && !has_workflow_rules && Gitlab::Ci::Features.raise_job_rules_without_workflow_rules_warning?
|
||||
add_warning('uses `rules` without defining `workflow:rules`')
|
||||
end
|
||||
|
||||
# inherit root variables
|
||||
@root_variables_value = deps&.variables_value # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
|
||||
|
|
|
|||
|
|
@ -47,6 +47,12 @@ module Gitlab
|
|||
def self.variables_api_filter_environment_scope?
|
||||
::Feature.enabled?(:ci_variables_api_filter_environment_scope, default_enabled: false)
|
||||
end
|
||||
|
||||
# This FF is only used for development purpose to test that warnings can be
|
||||
# raised and propagated to the UI.
|
||||
def self.raise_job_rules_without_workflow_rules_warning?
|
||||
::Feature.enabled?(:ci_raise_job_rules_without_workflow_rules_warning)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -19,7 +19,11 @@ module Gitlab
|
|||
parent_pipeline: parent_pipeline
|
||||
}
|
||||
)
|
||||
|
||||
add_warnings_to_pipeline(@command.config_processor.warnings)
|
||||
rescue Gitlab::Ci::YamlProcessor::ValidationError => ex
|
||||
add_warnings_to_pipeline(ex.warnings)
|
||||
|
||||
error(ex.message, config_error: true)
|
||||
rescue => ex
|
||||
Gitlab::ErrorTracking.track_exception(ex,
|
||||
|
|
@ -34,6 +38,14 @@ module Gitlab
|
|||
def break?
|
||||
@pipeline.errors.any? || @pipeline.persisted?
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def add_warnings_to_pipeline(warnings)
|
||||
return unless warnings.present?
|
||||
|
||||
warnings.each { |message| warning(message) }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -19,6 +19,10 @@ module Gitlab
|
|||
# https://gitlab.com/gitlab-org/gitlab/-/issues/220823
|
||||
pipeline.errors.add(:base, message)
|
||||
end
|
||||
|
||||
def warning(message)
|
||||
pipeline.add_warning_message(message)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -3,15 +3,33 @@
module Gitlab
module Ci
class YamlProcessor
ValidationError = Class.new(StandardError)
# ValidationError is treated like a result object in the form of an exception.
# We can return any warnings, raised during the config validation, along with
# the error object until we support multiple messages to be returned.
class ValidationError < StandardError
attr_reader :warnings

def initialize(message, warnings: [])
@warnings = warnings
super(message)
end
end

include Gitlab::Config::Entry::LegacyValidationHelpers

attr_reader :stages, :jobs

ResultWithErrors = Struct.new(:content, :errors) do
class Result
attr_reader :config, :errors, :warnings

def initialize(config: nil, errors: [], warnings: [])
@config = config
@errors = errors
@warnings = warnings
end

def valid?
errors.empty?
config.present? && errors.empty?
end
end
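A brief sketch of how a caller (such as the CI lint endpoint touched earlier in this commit) is expected to consume the new `Result` object; the `.gitlab-ci.yml` content and omitted options are invented for the example:

```ruby
# Illustration only — the YAML content here is an assumption.
content = <<~YAML
  rspec:
    script: bundle exec rspec
    rules:
      - if: '$CI_MERGE_REQUEST_ID'
YAML

result = Gitlab::Ci::YamlProcessor.new_with_validation_errors(content)

result.valid?    # => true only when a config object was produced and errors are empty
result.errors    # => [] on success, or messages such as 'Invalid configuration format'
result.warnings  # => e.g. ['jobs:rspec uses `rules` without defining `workflow:rules`']
result.config    # => the YamlProcessor instance exposing #stages, #builds and #jobs
```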
|
||||
|
|
@ -20,24 +38,32 @@ module Gitlab
|
|||
@config = @ci_config.to_hash
|
||||
|
||||
unless @ci_config.valid?
|
||||
raise ValidationError, @ci_config.errors.first
|
||||
error!(@ci_config.errors.first)
|
||||
end
|
||||
|
||||
initial_parsing
|
||||
rescue Gitlab::Ci::Config::ConfigError => e
|
||||
raise ValidationError, e.message
|
||||
error!(e.message)
|
||||
end
|
||||
|
||||
def self.new_with_validation_errors(content, opts = {})
|
||||
return ResultWithErrors.new('', ['Please provide content of .gitlab-ci.yml']) if content.blank?
|
||||
return Result.new(errors: ['Please provide content of .gitlab-ci.yml']) if content.blank?
|
||||
|
||||
config = Gitlab::Ci::Config.new(content, **opts)
|
||||
return ResultWithErrors.new("", config.errors) unless config.valid?
|
||||
return Result.new(errors: config.errors, warnings: config.warnings) unless config.valid?
|
||||
|
||||
config = Gitlab::Ci::YamlProcessor.new(content, opts)
|
||||
ResultWithErrors.new(config, [])
|
||||
rescue ValidationError, Gitlab::Ci::Config::ConfigError => e
|
||||
ResultWithErrors.new('', [e.message])
|
||||
Result.new(config: config, warnings: config.warnings)
|
||||
|
||||
rescue ValidationError => e
|
||||
Result.new(errors: [e.message], warnings: e.warnings)
|
||||
|
||||
rescue Gitlab::Ci::Config::ConfigError => e
|
||||
Result.new(errors: [e.message])
|
||||
end
|
||||
|
||||
def warnings
|
||||
@ci_config&.warnings || []
|
||||
end
|
||||
|
||||
def builds
|
||||
|
|
@ -157,10 +183,14 @@ module Gitlab
|
|||
return unless job[:stage]
|
||||
|
||||
unless job[:stage].is_a?(String) && job[:stage].in?(@stages)
|
||||
raise ValidationError, "#{name} job: chosen stage does not exist; available stages are #{@stages.join(", ")}"
|
||||
error!("#{name} job: chosen stage does not exist; available stages are #{@stages.join(", ")}")
|
||||
end
|
||||
end
|
||||
|
||||
def error!(message)
|
||||
raise ValidationError.new(message, warnings: warnings)
|
||||
end
|
||||
|
||||
def validate_job_dependencies!(name, job)
|
||||
return unless job[:dependencies]
|
||||
|
||||
|
|
@ -190,7 +220,7 @@ module Gitlab
|
|||
|
||||
def validate_job_dependency!(name, dependency, dependency_type = 'dependency')
|
||||
unless @jobs[dependency.to_sym]
|
||||
raise ValidationError, "#{name} job: undefined #{dependency_type}: #{dependency}"
|
||||
error!("#{name} job: undefined #{dependency_type}: #{dependency}")
|
||||
end
|
||||
|
||||
job_stage_index = stage_index(name)
|
||||
|
|
@ -199,7 +229,7 @@ module Gitlab
|
|||
# A dependency might be defined later in the configuration
|
||||
# with a stage that does not exist
|
||||
unless dependency_stage_index.present? && dependency_stage_index < job_stage_index
|
||||
raise ValidationError, "#{name} job: #{dependency_type} #{dependency} is not defined in prior stages"
|
||||
error!("#{name} job: #{dependency_type} #{dependency} is not defined in prior stages")
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -221,19 +251,19 @@ module Gitlab
|
|||
|
||||
on_stop_job = @jobs[on_stop.to_sym]
|
||||
unless on_stop_job
|
||||
raise ValidationError, "#{name} job: on_stop job #{on_stop} is not defined"
|
||||
error!("#{name} job: on_stop job #{on_stop} is not defined")
|
||||
end
|
||||
|
||||
unless on_stop_job[:environment]
|
||||
raise ValidationError, "#{name} job: on_stop job #{on_stop} does not have environment defined"
|
||||
error!("#{name} job: on_stop job #{on_stop} does not have environment defined")
|
||||
end
|
||||
|
||||
unless on_stop_job[:environment][:name] == environment[:name]
|
||||
raise ValidationError, "#{name} job: on_stop job #{on_stop} have different environment name"
|
||||
error!("#{name} job: on_stop job #{on_stop} have different environment name")
|
||||
end
|
||||
|
||||
unless on_stop_job[:environment][:action] == 'stop'
|
||||
raise ValidationError, "#{name} job: on_stop job #{on_stop} needs to have action stop defined"
|
||||
error!("#{name} job: on_stop job #{on_stop} needs to have action stop defined")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ module Gitlab
|
|||
@config = config
|
||||
@metadata = metadata
|
||||
@entries = {}
|
||||
@warnings = []
|
||||
|
||||
yield(self) if block_given?
|
||||
|
||||
|
|
@ -60,6 +61,14 @@ module Gitlab
|
|||
[]
|
||||
end
|
||||
|
||||
def warnings
|
||||
@warnings + descendants.flat_map(&:warnings)
|
||||
end
|
||||
|
||||
def add_warning(message)
|
||||
@warnings << "#{location} #{message}"
|
||||
end
|
||||
|
||||
def value
|
||||
if leaf?
|
||||
@config
|
||||
|
|
|
|||
|
|
@ -102,7 +102,7 @@ module Gitlab
|
|||
}.freeze
|
||||
# First-match win, so be sure to put more specific regex at the top...
|
||||
CATEGORIES = {
|
||||
%r{\Adoc/} => :docs,
|
||||
%r{\Adoc/.*(\.(md|png|gif|jpg))\z} => :docs,
|
||||
%r{\A(CONTRIBUTING|LICENSE|MAINTENANCE|PHILOSOPHY|PROCESS|README)(\.md)?\z} => :docs,
|
||||
|
||||
%r{\A(ee/)?app/(assets|views)/} => :frontend,
|
||||
|
|
|
|||
|
|
@ -81,12 +81,12 @@ module Gitlab
|
|||
self
|
||||
end
|
||||
|
||||
def count_request
|
||||
def instance_count_request
|
||||
@request_counter ||= Gitlab::Metrics.counter(:gitlab_redis_client_requests_total, 'Client side Redis request count, per Redis server')
|
||||
@request_counter.increment({ storage: storage_key })
|
||||
end
|
||||
|
||||
def count_exception(ex)
|
||||
def instance_count_exception(ex)
|
||||
# This metric is meant to give a client side view of how the Redis
|
||||
# server is doing. Redis itself does not expose error counts. This
|
||||
# metric can be used for Redis alerting and service health monitoring.
|
||||
|
|
@ -94,6 +94,17 @@ module Gitlab
|
|||
@exception_counter.increment({ storage: storage_key, exception: ex.class.to_s })
|
||||
end
|
||||
|
||||
def instance_observe_duration(duration)
|
||||
@request_latency_histogram ||= Gitlab::Metrics.histogram(
|
||||
:gitlab_redis_client_requests_duration_seconds,
|
||||
'Client side Redis request latency, per Redis server, excluding blocking commands',
|
||||
{},
|
||||
[0.001, 0.005, 0.01]
|
||||
)
|
||||
|
||||
@request_latency_histogram.observe({ storage: storage_key }, duration)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def request_count_key
|
||||
|
|
|
|||
|
|
@ -5,19 +5,26 @@ require 'redis'
|
|||
module Gitlab
|
||||
module Instrumentation
|
||||
module RedisInterceptor
|
||||
APDEX_EXCLUDE = %w[brpop blpop brpoplpush bzpopmin bzpopmax xread xreadgroup].freeze
|
||||
|
||||
def call(*args, &block)
|
||||
instrumentation_class.count_request
|
||||
start = Time.now # must come first so that 'start' is always defined
|
||||
instrumentation_class.instance_count_request
|
||||
instrumentation_class.redis_cluster_validate!(args.first)
|
||||
start = Time.now
|
||||
|
||||
super(*args, &block)
|
||||
rescue ::Redis::BaseError => ex
|
||||
instrumentation_class.count_exception(ex)
|
||||
instrumentation_class.instance_count_exception(ex)
|
||||
raise ex
|
||||
ensure
|
||||
duration = (Time.now - start)
|
||||
duration = Time.now - start
|
||||
|
||||
unless APDEX_EXCLUDE.include?(command_from_args(args))
|
||||
instrumentation_class.instance_observe_duration(duration)
|
||||
end
|
||||
|
||||
if ::RequestStore.active?
|
||||
# These metrics measure total Redis usage per Rails request / job.
|
||||
instrumentation_class.increment_request_count
|
||||
instrumentation_class.add_duration(duration)
|
||||
instrumentation_class.add_call_details(duration, args)
|
||||
|
|
@ -83,6 +90,12 @@ module Gitlab
|
|||
def instrumentation_class
|
||||
@options[:instrumentation_class] # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
end
|
||||
|
||||
def command_from_args(args)
|
||||
command = args[0]
|
||||
command = command[0] if command.is_a?(Array)
|
||||
command.to_s.downcase
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -105,6 +105,21 @@ module Gitlab
|
|||
|
||||
private
|
||||
|
||||
def package_allowed_paths
|
||||
packages_config = ::Gitlab.config.packages
|
||||
return [] unless allow_packages_storage_path?(packages_config)
|
||||
|
||||
[::Packages::PackageFileUploader.workhorse_upload_path]
|
||||
end
|
||||
|
||||
def allow_packages_storage_path?(packages_config)
|
||||
return false unless packages_config.enabled
|
||||
return false unless packages_config['storage_path']
|
||||
return false if packages_config.object_store.enabled && packages_config.object_store.direct_upload
|
||||
|
||||
true
|
||||
end
|
||||
|
||||
def allowed_paths
|
||||
[
|
||||
::FileUploader.root,
|
||||
|
|
@ -112,7 +127,7 @@ module Gitlab
|
|||
JobArtifactUploader.workhorse_upload_path,
|
||||
LfsObjectUploader.workhorse_upload_path,
|
||||
File.join(Rails.root, 'public/uploads/tmp')
|
||||
]
|
||||
] + package_allowed_paths
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -135,5 +150,3 @@ module Gitlab
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
::Gitlab::Middleware::Multipart::Handler.prepend_if_ee('EE::Gitlab::Middleware::Multipart::Handler')
|
||||
|
|
|
|||
|
|
@ -4140,6 +4140,9 @@ msgstr ""
|
|||
msgid "Cannot have multiple Jira imports running at the same time"
|
||||
msgstr ""
|
||||
|
||||
msgid "Cannot have multiple unresolved alerts"
|
||||
msgstr ""
|
||||
|
||||
msgid "Cannot import because issues are not available in this project."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -5841,6 +5844,9 @@ msgstr ""
|
|||
msgid "Code owners"
|
||||
msgstr ""
|
||||
|
||||
msgid "CodeIntelligence|This is the definition"
|
||||
msgstr ""
|
||||
|
||||
msgid "CodeOwner|Pattern"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -22982,6 +22988,9 @@ msgid_plural "The %{type} contains the following errors:"
|
|||
msgstr[0] ""
|
||||
msgstr[1] ""
|
||||
|
||||
msgid "The .gitlab-ci.yml has been successfully created."
|
||||
msgstr ""
|
||||
|
||||
msgid "The Advanced Global Search in GitLab is a powerful search service that saves you time. Instead of creating duplicate code and wasting time, you can now search for code within other teams that can help your own project."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -23566,6 +23575,9 @@ msgstr ""
|
|||
msgid "There was an error when unsubscribing from this label."
|
||||
msgstr ""
|
||||
|
||||
msgid "There was an error while fetching configuration data."
|
||||
msgstr ""
|
||||
|
||||
msgid "There was an error while fetching value stream analytics data."
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -11,43 +11,47 @@ exports[`Code navigation popover component renders popover 1`] = `
|
|||
/>
|
||||
|
||||
<div
|
||||
class="border-bottom"
|
||||
class="overflow-auto code-navigation-popover-container"
|
||||
>
|
||||
<pre
|
||||
class="border-0 bg-transparent m-0 code highlight"
|
||||
<div
|
||||
class=""
|
||||
>
|
||||
<span
|
||||
class="line"
|
||||
lang="javascript"
|
||||
<pre
|
||||
class="border-0 bg-transparent m-0 code highlight text-wrap"
|
||||
>
|
||||
<span
|
||||
class="k"
|
||||
class="line"
|
||||
lang="javascript"
|
||||
>
|
||||
function
|
||||
<span
|
||||
class="k"
|
||||
>
|
||||
function
|
||||
</span>
|
||||
<span>
|
||||
main() {
|
||||
</span>
|
||||
</span>
|
||||
<span>
|
||||
main() {
|
||||
<span
|
||||
class="line"
|
||||
lang="javascript"
|
||||
>
|
||||
<span>
|
||||
}
|
||||
</span>
|
||||
</span>
|
||||
</span>
|
||||
<span
|
||||
class="line"
|
||||
lang="javascript"
|
||||
>
|
||||
<span>
|
||||
}
|
||||
</span>
|
||||
</span>
|
||||
</pre>
|
||||
</pre>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="popover-body"
|
||||
class="popover-body border-top"
|
||||
>
|
||||
<gl-button-stub
|
||||
category="tertiary"
|
||||
class="w-100"
|
||||
data-testid="go-to-definition-btn"
|
||||
href="http://gitlab.com/test.js#L20"
|
||||
href="http://gitlab.com/test.js"
|
||||
icon=""
|
||||
size="medium"
|
||||
target="_blank"
|
||||
|
|
|
|||
|
|
@ -26,7 +26,8 @@ const MOCK_CODE_DATA = Object.freeze({
|
|||
],
|
||||
},
|
||||
],
|
||||
definition_path: 'test.js#L20',
|
||||
definition_path: 'test.js',
|
||||
definitionLineNumber: 20,
|
||||
});
|
||||
|
||||
const MOCK_DOCS_DATA = Object.freeze({
|
||||
|
|
|
|||
|
|
@ -69,7 +69,12 @@ describe('Code navigation actions', () => {
|
|||
payload: {
|
||||
path: 'index.js',
|
||||
normalizedData: {
|
||||
'0:0': { start_line: 0, start_char: 0, hover: { value: '123' } },
|
||||
'0:0': {
|
||||
definitionLineNumber: 0,
|
||||
start_line: 0,
|
||||
start_char: 0,
|
||||
hover: { value: '123' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -91,7 +96,12 @@ describe('Code navigation actions', () => {
|
|||
payload: {
|
||||
path: 'index.js',
|
||||
normalizedData: {
|
||||
'0:0': { start_line: 0, start_char: 0, hover: { value: '123' } },
|
||||
'0:0': {
|
||||
definitionLineNumber: 0,
|
||||
start_line: 0,
|
||||
start_char: 0,
|
||||
hover: { value: '123' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -159,7 +169,9 @@ describe('Code navigation actions', () => {
|
|||
let target;
|
||||
|
||||
beforeEach(() => {
|
||||
setFixtures('<div data-path="index.js"><div class="js-test"></div></div>');
|
||||
setFixtures(
|
||||
'<div data-path="index.js"><div class="line"><div class="js-test"></div></div></div>',
|
||||
);
|
||||
target = document.querySelector('.js-test');
|
||||
});
|
||||
|
||||
|
|
@ -186,7 +198,7 @@ describe('Code navigation actions', () => {
|
|||
payload: {
|
||||
blobPath: 'index.js',
|
||||
definition: { hover: 'test' },
|
||||
position: { height: 0, x: 0, y: 0 },
|
||||
position: { height: 0, x: 0, y: 0, lineIndex: 0 },
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
@ -210,7 +222,7 @@ describe('Code navigation actions', () => {
|
|||
payload: {
|
||||
blobPath: 'index.js',
|
||||
definition: { hover: 'test' },
|
||||
position: { height: 0, x: 0, y: 0 },
|
||||
position: { height: 0, x: 0, y: 0, lineIndex: 0 },
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
@ -235,7 +247,7 @@ describe('Code navigation actions', () => {
|
|||
payload: {
|
||||
blobPath: 'index.js',
|
||||
definition: { hover: 'test' },
|
||||
position: { height: 0, x: 0, y: 0 },
|
||||
position: { height: 0, x: 0, y: 0, lineIndex: 0 },
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ describe('setCurrentHoverElement', () => {
|
|||
describe('addInteractionClass', () => {
|
||||
beforeEach(() => {
|
||||
setFixtures(
|
||||
'<div data-path="index.js"><div class="blob-content"><div id="LC1"><span>console</span><span>.</span><span>log</span></div><div id="LC2"><span>function</span></div></div></div>',
|
||||
'<div data-path="index.js"><div class="blob-content"><div id="LC1" class="line"><span>console</span><span>.</span><span>log</span></div><div id="LC2" class="line"><span>function</span></div></div></div>',
|
||||
);
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import { __, s__ } from '~/locale';
|
||||
import $ from 'jquery';
|
||||
import timezoneMock from 'timezone-mock';
|
||||
import '~/commons/bootstrap';
|
||||
import * as datetimeUtility from '~/lib/utils/datetime_utility';
|
||||
|
||||
|
|
@ -86,6 +87,31 @@ describe('Date time utils', () => {
|
|||
datetimeUtility.formatDate('2016-07-23 00:00:00 UTC');
|
||||
}).toThrow(new Error('Invalid date'));
|
||||
});
|
||||
|
||||
describe('convert local timezone to UTC with utc parameter', () => {
|
||||
const midnightUTC = '2020-07-09';
|
||||
const format = 'mmm d, yyyy';
|
||||
|
||||
beforeEach(() => {
|
||||
timezoneMock.register('US/Pacific');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
timezoneMock.unregister();
|
||||
});
|
||||
|
||||
it('defaults to false', () => {
|
||||
const formattedDate = datetimeUtility.formatDate(midnightUTC, format);
|
||||
|
||||
expect(formattedDate).toBe('Jul 8, 2020');
|
||||
});
|
||||
|
||||
it('converts local time to UTC if utc flag is true', () => {
|
||||
const formattedDate = datetimeUtility.formatDate(midnightUTC, format, true);
|
||||
|
||||
expect(formattedDate).toBe('Jul 9, 2020');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('get day difference', () => {
|
||||
|
|
|
|||
|
|
@ -231,6 +231,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
|
|||
end
|
||||
|
||||
context 'when workflow rules is used' do
|
||||
let(:workflow) { double('workflow', 'has_rules?' => true) }
|
||||
|
||||
before do
|
||||
entry.compose!(deps)
|
||||
end
|
||||
|
||||
context 'when rules are used' do
|
||||
let(:config) { { script: 'ls', cache: { key: 'test' }, rules: [] } }
|
||||
|
||||
|
|
@ -239,11 +245,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when rules are not used' do
|
||||
context 'when rules are not used and only is defined' do
|
||||
let(:config) { { script: 'ls', cache: { key: 'test' }, only: [] } }
|
||||
|
||||
it 'does not define only' do
|
||||
expect(entry).not_to be_only_defined
|
||||
it 'keeps only entry' do
|
||||
expect(entry).to be_only_defined
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -435,6 +435,153 @@ module Gitlab
end
end

describe '#warnings' do
before do
stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: true)
end

context 'when a warning is raised in a given entry' do
let(:config) do
<<-EOYML
rspec:
script: rspec
rules:
- if: '$VAR == "value"'
EOYML
end

it 'is propagated all the way up to the processor' do
expect(subject.warnings).to contain_exactly('jobs:rspec uses `rules` without defining `workflow:rules`')
end
end

context 'when a warning is raised together with errors' do
let(:config) do
<<-EOYML
rspec:
script: rspec
rules:
- if: '$VAR == "value"'
invalid:
script: echo
artifacts:
- wrong_key: value
EOYML
end

it 'is propagated all the way up into the raised exception' do
expect { subject }.to raise_error do |error|
expect(error).to be_a(described_class::ValidationError)
expect(error.message).to eq('jobs:invalid:artifacts config should be a hash')
expect(error.warnings).to contain_exactly('jobs:rspec uses `rules` without defining `workflow:rules`')
end
end
end

context 'when error is raised before composing the config' do
let(:config) do
<<-EOYML
include: unknown/file.yml
rspec:
script: rspec
rules:
- if: '$VAR == "value"'
EOYML
end

it 'raises an exception with empty warnings array' do
expect { subject }.to raise_error do |error|
expect(error).to be_a(described_class::ValidationError)
expect(error.message).to eq('Local file `unknown/file.yml` does not have project!')
expect(error.warnings).to be_empty
end
end
end

context 'when error is raised after composing the config with warnings' do
shared_examples 'has warnings and expected error' do |error_message|
it 'raises an exception including warnings' do
expect { subject }.to raise_error do |error|
expect(error).to be_a(described_class::ValidationError)
expect(error.message).to match(error_message)
expect(error.warnings).to be_present
end
end
end

context 'when stage does not exist' do
let(:config) do
<<-EOYML
rspec:
stage: custom_stage
script: rspec
rules:
- if: '$VAR == "value"'
EOYML
end

it_behaves_like 'has warnings and expected error', /rspec job: chosen stage does not exist/
end

context 'job dependency does not exist' do
let(:config) do
<<-EOYML
build:
stage: build
script: echo
rules:
- if: '$VAR == "value"'
test:
stage: test
script: echo
needs: [unknown_job]
EOYML
end

it_behaves_like 'has warnings and expected error', /test job: undefined need: unknown_job/
end

context 'job dependency defined in later stage' do
let(:config) do
<<-EOYML
build:
stage: build
script: echo
needs: [test]
rules:
- if: '$VAR == "value"'
test:
stage: test
script: echo
EOYML
end

it_behaves_like 'has warnings and expected error', /build job: need test is not defined in prior stages/
end
end

context 'when feature flag is disabled' do
before do
stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: false)
end

context 'job rules used without workflow rules' do
let(:config) do
<<-EOYML
rspec:
script: rspec
rules:
- if: '$VAR == "value"'
EOYML
end

it 'does not raise the warning' do
expect(subject.warnings).to be_empty
end
end
end
end

describe 'only / except policies validations' do
context 'when `only` has an invalid value' do
let(:config) { { rspec: { script: "rspec", type: "test", only: only } } }
@@ -2517,7 +2664,7 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Invalid configuration format'])
expect(subject.content).to be_blank
expect(subject.config).to be_blank
end
end
@@ -2527,7 +2674,7 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['jobs:rspec:tags config should be an array of strings'])
expect(subject.content).to be_blank
expect(subject.config).to be_blank
end
end
@@ -2539,7 +2686,7 @@ module Gitlab
expect(subject.errors).to contain_exactly(
'jobs:rspec config contains unknown keys: bad_tags',
'jobs:rspec rules should be an array of hashes')
expect(subject.content).to be_blank
expect(subject.config).to be_blank
end
end
@@ -2549,7 +2696,7 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Please provide content of .gitlab-ci.yml'])
expect(subject.content).to be_blank
expect(subject.config).to be_blank
end
end
@@ -2559,7 +2706,7 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Unknown alias: bad_alias'])
expect(subject.content).to be_blank
expect(subject.config).to be_blank
end
end
@@ -2569,7 +2716,7 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(true)
expect(subject.errors).to be_empty
expect(subject.content).to be_present
expect(subject.config).to be_present
end
end
end
@@ -167,7 +167,7 @@ RSpec.describe Gitlab::Danger::Helper do

describe '#categories_for_file' do
where(:path, :expected_categories) do
'doc/foo' | [:docs]
'doc/foo.md' | [:docs]
'CONTRIBUTING.md' | [:docs]
'LICENSE' | [:docs]
'MAINTENANCE.md' | [:docs]
@@ -47,15 +47,15 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
let(:instrumentation_class) { Gitlab::Redis::SharedState.instrumentation_class }

it 'counts successful requests' do
expect(instrumentation_class).to receive(:count_request).and_call_original
expect(instrumentation_class).to receive(:instance_count_request).and_call_original

Gitlab::Redis::SharedState.with { |redis| redis.call(:get, 'foobar') }
end

it 'counts exceptions' do
expect(instrumentation_class).to receive(:count_exception)
expect(instrumentation_class).to receive(:instance_count_exception)
.with(instance_of(Redis::CommandError)).and_call_original
expect(instrumentation_class).to receive(:count_request).and_call_original
expect(instrumentation_class).to receive(:instance_count_request).and_call_original

expect do
Gitlab::Redis::SharedState.with do |redis|
@@ -64,4 +64,51 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
end.to raise_exception(Redis::CommandError)
end
end

describe 'latency' do
let(:instrumentation_class) { Gitlab::Redis::SharedState.instrumentation_class }

describe 'commands in the apdex' do
where(:command) do
[
[[:get, 'foobar']],
[%w[GET foobar]]
]
end

with_them do
it 'measures requests we want in the apdex' do
expect(instrumentation_class).to receive(:instance_observe_duration).with(a_value > 0)
.and_call_original

Gitlab::Redis::SharedState.with { |redis| redis.call(*command) }
end
end
end

describe 'commands not in the apdex' do
where(:command) do
[
[%w[brpop foobar 0.01]],
[%w[blpop foobar 0.01]],
[%w[brpoplpush foobar bazqux 0.01]],
[%w[bzpopmin foobar 0.01]],
[%w[bzpopmax foobar 0.01]],
[%w[xread block 1 streams mystream 0-0]],
[%w[xreadgroup group mygroup myconsumer block 1 streams foobar 0-0]]
]
end

with_them do
it 'skips requests we do not want in the apdex' do
expect(instrumentation_class).not_to receive(:instance_observe_duration)

begin
Gitlab::Redis::SharedState.with { |redis| redis.call(*command) }
rescue Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError, ::Redis::CommandError
end
end
end
end
end
end
@@ -232,4 +232,82 @@ RSpec.describe Gitlab::Middleware::Multipart do
middleware.call(env)
end
end

describe '#call' do
context 'with packages storage' do
using RSpec::Parameterized::TableSyntax

let(:storage_path) { 'shared/packages' }

RSpec.shared_examples 'allowing the multipart upload' do
it 'allows files to be uploaded' do
with_tmp_dir('tmp/uploads', storage_path) do |dir, env|
allow(Packages::PackageFileUploader).to receive(:root).and_return(File.join(dir, storage_path))

expect(app).to receive(:call) do |env|
expect(get_params(env)['file']).to be_a(::UploadedFile)
end

middleware.call(env)
end
end
end

RSpec.shared_examples 'not allowing the multipart upload when package upload path is used' do
it 'does not allow files to be uploaded' do
with_tmp_dir('tmp/uploads', storage_path) do |dir, env|
# with_tmp_dir sets the same workhorse_upload_path for all Uploaders,
# so we have to prevent JobArtifactUploader and LfsObjectUploader to
# allow the tested path
allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(Dir.tmpdir)
allow(LfsObjectUploader).to receive(:workhorse_upload_path).and_return(Dir.tmpdir)

status, headers, body = middleware.call(env)

expect(status).to eq(400)
expect(headers).to eq({ 'Content-Type' => 'text/plain' })
expect(body).to start_with('insecure path used')
end
end
end

RSpec.shared_examples 'adding package storage to multipart allowed paths' do
before do
expect(::Packages::PackageFileUploader).to receive(:workhorse_upload_path).and_call_original
end

it_behaves_like 'allowing the multipart upload'
end

RSpec.shared_examples 'not adding package storage to multipart allowed paths' do
before do
expect(::Packages::PackageFileUploader).not_to receive(:workhorse_upload_path)
end

it_behaves_like 'not allowing the multipart upload when package upload path is used'
end

where(:object_storage_enabled, :direct_upload_enabled, :example_name) do
false | true | 'adding package storage to multipart allowed paths'
false | false | 'adding package storage to multipart allowed paths'
true | true | 'not adding package storage to multipart allowed paths'
true | false | 'adding package storage to multipart allowed paths'
end

with_them do
before do
stub_config(packages: {
enabled: true,
object_store: {
enabled: object_storage_enabled,
direct_upload: direct_upload_enabled
},
storage_path: storage_path
})
end

it_behaves_like params[:example_name]
end
end
end
end
@@ -0,0 +1,57 @@
# frozen_string_literal: true

require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200706035141_adjust_unique_index_alert_management_alerts.rb')

RSpec.describe AdjustUniqueIndexAlertManagementAlerts, :migration do
let(:migration) { described_class.new }
let(:alerts) { AlertManagement::Alert }
let(:project) { create_project }
let(:other_project) { create_project }
let(:resolved_state) { 2 }
let(:triggered_state) { 1 }
let!(:existing_alert) { create_alert(project, resolved_state, '1234', 1) }
let!(:p2_alert) { create_alert(other_project, resolved_state, '1234', 1) }
let!(:p2_alert_diff_fingerprint) { create_alert(other_project, resolved_state, '4567', 2) }

it 'can reverse the migration' do
expect(existing_alert.fingerprint).not_to eq(nil)
expect(p2_alert.fingerprint).not_to eq(nil)
expect(p2_alert_diff_fingerprint.fingerprint).not_to eq(nil)

migrate!

# Adding a second alert with the same fingerprint now that we can
second_alert = create_alert(project, triggered_state, '1234', 2)
expect(alerts.count).to eq(4)

schema_migrate_down!

# We keep the alerts, but the oldest ones fingerprint is removed
expect(alerts.count).to eq(4)
expect(second_alert.reload.fingerprint).not_to eq(nil)
expect(p2_alert.fingerprint).not_to eq(nil)
expect(p2_alert_diff_fingerprint.fingerprint).not_to eq(nil)
expect(existing_alert.reload.fingerprint).to eq(nil)
end

def namespace
@namespace ||= table(:namespaces).create!(name: 'foo', path: 'foo')
end

def create_project
table(:projects).create!(namespace_id: namespace.id)
end

def create_alert(project, status, fingerprint, iid)
params = {
title: 'test',
started_at: Time.current,
iid: iid,
project_id: project.id,
status: status,
fingerprint: fingerprint
}
table(:alert_management_alerts).create!(params)
end
end
@@ -83,21 +83,50 @@ RSpec.describe AlertManagement::Alert do
end

describe 'fingerprint' do
let_it_be(:project) { create(:project) }
let_it_be(:fingerprint) { 'fingerprint' }
let_it_be(:existing_alert) { create(:alert_management_alert, fingerprint: fingerprint) }
let(:new_alert) { build(:alert_management_alert, fingerprint: fingerprint, project: project) }

subject { new_alert }

context 'adding an alert with the same fingerprint' do
context 'same project' do
let(:project) { existing_alert.project }
context 'same project, various states' do
using RSpec::Parameterized::TableSyntax

it { is_expected.not_to be_valid }
# We are only validating uniqueness for non-resolved alerts
where(:existing_status, :new_status, :valid) do
:resolved | :triggered | true
:resolved | :acknowledged | true
:resolved | :ignored | true
:resolved | :resolved | true
:triggered | :triggered | false
:triggered | :acknowledged | false
:triggered | :ignored | false
:triggered | :resolved | true
:acknowledged | :triggered | false
:acknowledged | :acknowledged | false
:acknowledged | :ignored | false
:acknowledged | :resolved | true
:ignored | :triggered | false
:ignored | :acknowledged | false
:ignored | :ignored | false
:ignored | :resolved | true
end

with_them do
let!(:existing_alert) { create(:alert_management_alert, existing_status, fingerprint: fingerprint, project: project) }
let(:new_alert) { build(:alert_management_alert, new_status, fingerprint: fingerprint, project: project) }

if params[:valid]
it { is_expected.to be_valid }
else
it { is_expected.to be_invalid }
end
end
end

context 'different project' do
let(:project) { create(:project) }
let!(:existing_alert) { create(:alert_management_alert, fingerprint: fingerprint) }

it { is_expected.to be_valid }
end
@@ -910,6 +910,14 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
expect(user.reload.bio).to eq('')
end

it 'updates user with nil bio' do
put api("/users/#{user.id}", admin), params: { bio: nil }

expect(response).to have_gitlab_http_status(:ok)
expect(json_response['bio']).to eq('')
expect(user.reload.bio).to eq('')
end

it "updates user with new password and forces reset on next login" do
put api("/users/#{user.id}", admin), params: { password: '12345678' }
@@ -0,0 +1,114 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::CreatePipelineService do
describe 'creation errors and warnings' do
let_it_be(:user) { create(:admin) }
let_it_be(:project) { create(:project, :repository, creator: user) }

let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source) }

before do
stub_ci_pipeline_yaml_file(config)
stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: true)
end

context 'when created successfully' do
context 'when warnings are raised' do
let(:config) do
<<~YAML
test:
script: rspec
rules:
- if: '$CI_COMMIT_BRANCH'
YAML
end

it 'contains only warnings' do
expect(pipeline.error_messages.map(&:content)).to be_empty

expect(pipeline.warning_messages.map(&:content)).to contain_exactly(
'jobs:test uses `rules` without defining `workflow:rules`'
)
end

context 'when feature flag is disabled for the particular warning' do
before do
stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: false)
end

it 'does not contain warnings' do
expect(pipeline.error_messages.map(&:content)).to be_empty

expect(pipeline.warning_messages.map(&:content)).to be_empty
end
end
end

context 'when no warnings are raised' do
let(:config) do
<<~YAML
test:
script: rspec
YAML
end

it 'contains no warnings' do
expect(pipeline.error_messages).to be_empty

expect(pipeline.warning_messages).to be_empty
end
end
end

context 'when failed to create the pipeline' do
context 'when warnings are raised' do
let(:config) do
<<~YAML
build:
stage: build
script: echo
needs: [test]
test:
stage: test
script: echo
rules:
- if: '$CI_COMMIT_BRANCH'
YAML
end

it 'contains both errors and warnings' do
error_message = 'build job: need test is not defined in prior stages'
warning_message = 'jobs:test uses `rules` without defining `workflow:rules`'

expect(pipeline.yaml_errors).to eq(error_message)
expect(pipeline.error_messages.map(&:content)).to contain_exactly(error_message)
expect(pipeline.errors.full_messages).to contain_exactly(error_message)

expect(pipeline.warning_messages.map(&:content)).to contain_exactly(warning_message)
end
end

context 'when no warnings are raised' do
let(:config) do
<<~YAML
invalid: yaml
YAML
end

it 'contains only errors' do
error_message = 'root config contains unknown keys: invalid'
expect(pipeline.yaml_errors).to eq(error_message)
expect(pipeline.error_messages.map(&:content)).to contain_exactly(error_message)
expect(pipeline.errors.full_messages).to contain_exactly(error_message)

expect(pipeline.warning_messages).to be_empty
end
end
end
end
end
@@ -85,5 +85,29 @@ RSpec.describe Jira::JqlBuilderService do
expect(subject).to eq('project = PROJECT_KEY order by updated ASC')
end
end

context 'with opened state param' do
let(:params) { { state: 'opened' } }

it 'builds jql' do
expect(subject).to eq('project = PROJECT_KEY AND statusCategory != Done order by created DESC')
end
end

context 'with closed state param' do
let(:params) { { state: 'closed' } }

it 'builds jql' do
expect(subject).to eq('project = PROJECT_KEY AND statusCategory = Done order by created DESC')
end
end

context 'with any other state param' do
let(:params) { { state: 'all' } }

it 'builds jql' do
expect(subject).to eq('project = PROJECT_KEY order by created DESC')
end
end
end
end
@@ -25,6 +25,7 @@ RSpec.describe Members::DestroyService do
before do
type = member.is_a?(GroupMember) ? 'Group' : 'Project'
expect(TodosDestroyer::EntityLeaveWorker).to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, member.user_id, member.source_id, type)
expect(MembersDestroyer::UnassignIssuablesWorker).to receive(:perform_async).with(member.user_id, member.source_id, type) if opts[:unassign_issuables]
end

it 'destroys the member' do
@@ -0,0 +1,15 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe UpdateContainerRegistryInfoWorker do
describe '#perform' do
it 'calls UpdateContainerRegistryInfoService' do
expect_next_instance_of(UpdateContainerRegistryInfoService) do |service|
expect(service).to receive(:execute)
end

subject.perform
end
end
end