Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-07-07 18:08:30 +00:00
parent e1e58fabfa
commit 59776dd803
72 changed files with 2497 additions and 237 deletions

View File

@ -1 +1 @@
13.20.0
13.19.0

View File

@ -29,6 +29,7 @@ export default {
issuableAttributesQueries,
i18n: {
[IssuableAttributeType.Milestone]: __('Milestone'),
expired: __('(expired)'),
none: __('None'),
},
directives: {
@ -74,9 +75,14 @@ export default {
type: String,
required: true,
validator(value) {
return value === IssuableType.Issue;
return [IssuableType.Issue, IssuableType.MergeRequest].includes(value);
},
},
icon: {
type: String,
required: false,
default: undefined,
},
},
apollo: {
currentAttribute: {
@ -172,6 +178,9 @@ export default {
attributeTypeTitle() {
return this.$options.i18n[this.issuableAttribute];
},
attributeTypeIcon() {
return this.icon || this.issuableAttribute;
},
i18n() {
return {
noAttribute: sprintf(s__('DropdownWidget|No %{issuableAttribute}'), {
@ -224,7 +233,8 @@ export default {
variables: {
fullPath: this.workspacePath,
attributeId:
this.issuableAttribute === IssuableAttributeType.Milestone
this.issuableAttribute === IssuableAttributeType.Milestone &&
this.issuableType === IssuableType.Issue
? getIdFromGraphQLId(attributeId)
: attributeId,
iid: this.iid,
@ -255,6 +265,11 @@ export default {
attributeId === this.currentAttribute?.id || (!this.currentAttribute?.id && !attributeId)
);
},
isAttributeOverdue(attribute) {
return this.issuableAttribute === IssuableAttributeType.Milestone
? attribute?.expired
: false;
},
showDropdown() {
this.$refs.newDropdown.show();
},
@ -284,8 +299,10 @@ export default {
>
<template #collapsed>
<div v-if="isClassicSidebar" v-gl-tooltip class="sidebar-collapsed-icon">
<gl-icon :size="16" :aria-label="attributeTypeTitle" :name="issuableAttribute" />
<span class="collapse-truncated-title">{{ attributeTitle }}</span>
<gl-icon :size="16" :aria-label="attributeTypeTitle" :name="attributeTypeIcon" />
<span class="collapse-truncated-title">
{{ attributeTitle }}
</span>
</div>
<div
:data-testid="`select-${issuableAttribute}`"
@ -308,6 +325,7 @@ export default {
:data-qa-selector="`${issuableAttribute}_link`"
>
{{ attributeTitle }}
<span v-if="isAttributeOverdue(currentAttribute)">{{ $options.i18n.expired }}</span>
</gl-link>
</slot>
</div>
@ -358,6 +376,7 @@ export default {
@click="updateAttribute(attrItem.id)"
>
{{ attrItem.title }}
<span v-if="isAttributeOverdue(attrItem)">{{ $options.i18n.expired }}</span>
</gl-dropdown-item>
</slot>
</template>

View File

@ -12,6 +12,7 @@ import issueDueDateQuery from '~/sidebar/queries/issue_due_date.query.graphql';
import issueReferenceQuery from '~/sidebar/queries/issue_reference.query.graphql';
import issueSubscribedQuery from '~/sidebar/queries/issue_subscribed.query.graphql';
import issueTimeTrackingQuery from '~/sidebar/queries/issue_time_tracking.query.graphql';
import mergeRequestMilestone from '~/sidebar/queries/merge_request_milestone.query.graphql';
import mergeRequestReferenceQuery from '~/sidebar/queries/merge_request_reference.query.graphql';
import mergeRequestSubscribed from '~/sidebar/queries/merge_request_subscribed.query.graphql';
import mergeRequestTimeTrackingQuery from '~/sidebar/queries/merge_request_time_tracking.query.graphql';
@ -24,6 +25,7 @@ import updateEpicSubscriptionMutation from '~/sidebar/queries/update_epic_subscr
import updateIssueConfidentialMutation from '~/sidebar/queries/update_issue_confidential.mutation.graphql';
import updateIssueDueDateMutation from '~/sidebar/queries/update_issue_due_date.mutation.graphql';
import updateIssueSubscriptionMutation from '~/sidebar/queries/update_issue_subscription.mutation.graphql';
import mergeRequestMilestoneMutation from '~/sidebar/queries/update_merge_request_milestone.mutation.graphql';
import updateMergeRequestSubscriptionMutation from '~/sidebar/queries/update_merge_request_subscription.mutation.graphql';
import updateAlertAssigneesMutation from '~/vue_shared/alert_details/graphql/mutations/alert_set_assignees.mutation.graphql';
import getAlertAssignees from '~/vue_shared/components/sidebar/queries/get_alert_assignees.query.graphql';
@ -171,12 +173,19 @@ export const issuableMilestoneQueries = {
query: projectIssueMilestoneQuery,
mutation: projectIssueMilestoneMutation,
},
[IssuableType.MergeRequest]: {
query: mergeRequestMilestone,
mutation: mergeRequestMilestoneMutation,
},
};
export const milestonesQueries = {
[IssuableType.Issue]: {
query: projectMilestonesQuery,
},
[IssuableType.MergeRequest]: {
query: projectMilestonesQuery,
},
};
export const IssuableAttributeType = {

View File

@ -18,6 +18,7 @@ import SidebarConfidentialityWidget from '~/sidebar/components/confidential/side
import SidebarDueDateWidget from '~/sidebar/components/date/sidebar_date_widget.vue';
import SidebarParticipantsWidget from '~/sidebar/components/participants/sidebar_participants_widget.vue';
import SidebarReferenceWidget from '~/sidebar/components/reference/sidebar_reference_widget.vue';
import SidebarDropdownWidget from '~/sidebar/components/sidebar_dropdown_widget.vue';
import { apolloProvider } from '~/sidebar/graphql';
import trackShowInviteMemberLink from '~/sidebar/track_invite_members';
import Translate from '../vue_shared/translate';
@ -29,6 +30,7 @@ import SidebarReviewers from './components/reviewers/sidebar_reviewers.vue';
import SidebarSeverity from './components/severity/sidebar_severity.vue';
import SidebarSubscriptionsWidget from './components/subscriptions/sidebar_subscriptions_widget.vue';
import SidebarTimeTracking from './components/time_tracking/sidebar_time_tracking.vue';
import { IssuableAttributeType } from './constants';
import SidebarMoveIssue from './lib/sidebar_move_issue';
Vue.use(Translate);
@ -154,7 +156,8 @@ function mountReviewersComponent(mediator) {
issuableIid: String(iid),
projectPath: fullPath,
field: el.dataset.field,
issuableType: isInIssuePage() || isInDesignPage() ? 'issue' : 'merge_request',
issuableType:
isInIssuePage() || isInDesignPage() ? IssuableType.Issue : IssuableType.MergeRequest,
},
}),
});
@ -166,6 +169,40 @@ function mountReviewersComponent(mediator) {
}
}
function mountMilestoneSelect() {
const el = document.querySelector('.js-milestone-select');
if (!el) {
return false;
}
const { canEdit, projectPath, issueIid } = el.dataset;
return new Vue({
el,
apolloProvider,
components: {
SidebarDropdownWidget,
},
provide: {
canUpdate: parseBoolean(canEdit),
isClassicSidebar: true,
},
render: (createElement) =>
createElement('sidebar-dropdown-widget', {
props: {
attrWorkspacePath: projectPath,
workspacePath: projectPath,
iid: issueIid,
issuableType:
isInIssuePage() || isInDesignPage() ? IssuableType.Issue : IssuableType.MergeRequest,
issuableAttribute: IssuableAttributeType.Milestone,
icon: 'clock',
},
}),
});
}
export function mountSidebarLabels() {
const el = document.querySelector('.js-sidebar-labels');
@ -466,6 +503,7 @@ export function mountSidebar(mediator) {
mountAssigneesComponentDeprecated(mediator);
}
mountReviewersComponent(mediator);
mountMilestoneSelect();
mountConfidentialComponent(mediator);
mountDueDateComponent(mediator);
mountReferenceComponent(mediator);

View File

@ -0,0 +1,14 @@
#import "./milestone.fragment.graphql"
query mergeRequestMilestone($fullPath: ID!, $iid: String!) {
workspace: project(fullPath: $fullPath) {
__typename
issuable: mergeRequest(iid: $iid) {
__typename
id
attribute: milestone {
...MilestoneFragment
}
}
}
}

View File

@ -2,4 +2,5 @@ fragment MilestoneFragment on Milestone {
id
title
webUrl: webPath
expired
}

View File

@ -11,6 +11,7 @@ mutation projectIssueMilestoneMutation($fullPath: ID!, $iid: String!, $attribute
title
id
state
expired
}
}
}

View File

@ -3,7 +3,13 @@
query projectMilestones($fullPath: ID!, $title: String, $state: MilestoneStateEnum) {
workspace: project(fullPath: $fullPath) {
__typename
attributes: milestones(searchTitle: $title, state: $state) {
attributes: milestones(
searchTitle: $title
state: $state
sort: EXPIRED_LAST_DUE_DATE_ASC
first: 20
includeAncestors: true
) {
nodes {
...MilestoneFragment
state

View File

@ -0,0 +1,17 @@
mutation mergeRequestSetMilestone($fullPath: ID!, $iid: String!, $attributeId: ID) {
issuableSetAttribute: mergeRequestSetMilestone(
input: { projectPath: $fullPath, iid: $iid, milestoneId: $attributeId }
) {
__typename
errors
issuable: mergeRequest {
__typename
id
attribute: milestone {
title
id
state
}
}
}
}

View File

@ -25,6 +25,10 @@ class Compare
@straight = straight
end
def cache_key
[@project, :compare, diff_refs.hash]
end
def commits
@commits ||= Commit.decorate(@compare.commits, project)
end

View File

@ -3,20 +3,32 @@
class LfsDownloadObject
include ActiveModel::Validations
attr_accessor :oid, :size, :link
attr_accessor :oid, :size, :link, :headers
delegate :sanitized_url, :credentials, to: :sanitized_uri
validates :oid, format: { with: /\A\h{64}\z/ }
validates :size, numericality: { greater_than_or_equal_to: 0 }
validates :link, public_url: { protocols: %w(http https) }
validate :headers_must_be_hash
def initialize(oid:, size:, link:)
def initialize(oid:, size:, link:, headers: {})
@oid = oid
@size = size
@link = link
@headers = headers || {}
end
def sanitized_uri
@sanitized_uri ||= Gitlab::UrlSanitizer.new(link)
end
def has_authorization_header?
headers.keys.map(&:downcase).include?('authorization')
end
private
def headers_must_be_hash
errors.add(:base, "headers must be a Hash") unless headers.is_a?(Hash)
end
end
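A minimal usage sketch of the new `headers` support (values invented, not from this commit):

```ruby
# Sketch only; values invented.
object = LfsDownloadObject.new(
  oid: 'a' * 64,                                  # must be 64 hex characters
  size: 1024,
  link: 'https://lfs.example.com/objects/aaa',
  headers: { 'Authorization' => 'Bearer token' }
)

object.valid?                    # => true; headers is a Hash
object.has_authorization_header? # => true; matched case-insensitively
```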

View File

@ -360,7 +360,7 @@ class MergeRequest < ApplicationRecord
scope :preload_approved_by_users, -> { preload(:approved_by_users) }
scope :preload_metrics, -> (relation) { preload(metrics: relation) }
scope :preload_project_and_latest_diff, -> { preload(:source_project, :latest_merge_request_diff) }
scope :preload_latest_diff_commit, -> { preload(latest_merge_request_diff: :merge_request_diff_commits) }
scope :preload_latest_diff_commit, -> { preload(latest_merge_request_diff: { merge_request_diff_commits: [:commit_author, :committer] }) }
scope :preload_milestoneish_associations, -> { preload_routables.preload(:assignees, :labels) }
scope :with_web_entity_associations, -> { preload(:author, target_project: [:project_feature, group: [:route, :parent], namespace: :route]) }

View File

@ -0,0 +1,95 @@
# frozen_string_literal: true
class MergeRequest::DiffCommitUser < ApplicationRecord
validates :name, length: { maximum: 512 }
validates :email, length: { maximum: 512 }
validates :name, presence: true, unless: :email
validates :email, presence: true, unless: :name
# Prepares a value to be inserted into a column in the table
# `merge_request_diff_commit_users`. Values in this table are limited to
# 512 characters.
#
# We treat empty strings as NULL values, as there's no point in (for
# example) storing a row where both the name and Email are an empty
# string. In addition, if we treated them differently we could end up with
# two rows: one where field X is NULL, and one where field X is an empty
# string. This is redundant, so we avoid storing such data.
def self.prepare(value)
value.present? ? value[0..511] : nil
end
# Creates a new row, or returns an existing one if a row already exists.
def self.find_or_create(name, email)
find_or_create_by!(name: name, email: email)
rescue ActiveRecord::RecordNotUnique
retry
end
# Finds many (name, email) pairs in bulk.
def self.bulk_find(pairs)
queries = {}
rows = []
pairs.each do |(name, email)|
queries[[name, email]] = where(name: name, email: email).to_sql
end
# We may end up having to query many users. To ensure we don't hit any
# query size limits, we get a fixed number of users at a time.
queries.values.each_slice(1_000).map do |slice|
rows.concat(from("(#{slice.join("\nUNION ALL\n")}) #{table_name}").to_a)
end
rows
end
# Finds or creates rows for the given pairs of names and Emails.
#
# The `names_and_emails` argument must be an Array/Set of tuples like so:
#
# [
# [name, email],
# [name, email],
# ...
# ]
#
# This method expects that the names and Emails have already been trimmed to
# at most 512 characters.
#
# The return value is a Hash that maps these tuples to instances of this
# model.
def self.bulk_find_or_create(pairs)
mapping = {}
create = []
# Over time, fewer new rows need to be created. We take advantage of that
# here by first finding all rows that already exist, using a limited number
# of queries (in most cases only one query will be needed).
bulk_find(pairs).each do |row|
mapping[[row.name, row.email]] = row
end
pairs.each do |(name, email)|
create << { name: name, email: email } unless mapping[[name, email]]
end
return mapping if create.empty?
# Sometimes we may need to insert new users into the table. We do this in
# bulk, so we only need one INSERT for all missing users.
insert_all(create, returning: %w[id name email]).each do |row|
mapping[[row['name'], row['email']]] =
new(id: row['id'], name: row['name'], email: row['email'])
end
# It's possible for (name, email) pairs to be inserted concurrently,
# resulting in the above insert not returning anything. Here we get any
# remaining users that were created concurrently.
bulk_find(pairs.reject { |pair| mapping.key?(pair) }).each do |row|
mapping[[row.name, row.email]] = row
end
mapping
end
end
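A hedged sketch of the intended call pattern for the bulk lookup helpers above (names invented):

```ruby
# Sketch only; names invented. Pairs are (name, email) tuples already
# trimmed with MergeRequest::DiffCommitUser.prepare.
pairs = Set.new([%w[Alice alice@example.com], %w[Bob bob@example.com]])

mapping = MergeRequest::DiffCommitUser.bulk_find_or_create(pairs)
mapping[%w[Alice alice@example.com]] # => #<MergeRequest::DiffCommitUser id: ...>
```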

View File

@ -701,7 +701,7 @@ class MergeRequestDiff < ApplicationRecord
end
def load_commits(limit: nil)
commits = merge_request_diff_commits.limit(limit)
commits = merge_request_diff_commits.with_users.limit(limit)
.map { |commit| Commit.from_hash(commit.to_hash, project) }
CommitCollection

View File

@ -9,21 +9,51 @@ class MergeRequestDiffCommit < ApplicationRecord
belongs_to :merge_request_diff
# This relation is called `commit_author` and not `author`, as the project
# import/export logic treats relations named `author` as instances of the
# `User` class.
#
# NOTE: these columns are _not_ indexed, nor do they use foreign keys.
#
# This is deliberate, as creating these indexes on GitLab.com takes a _very_
# long time. In addition, there's no real need for them either based on how
# this data is used.
#
# For more information, refer to the following:
#
# - https://gitlab.com/gitlab-com/gl-infra/production/-/issues/5038#note_614592881
# - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63669
belongs_to :commit_author, class_name: 'MergeRequest::DiffCommitUser'
belongs_to :committer, class_name: 'MergeRequest::DiffCommitUser'
sha_attribute :sha
alias_attribute :id, :sha
serialize :trailers, Serializers::Json # rubocop:disable Cop/ActiveRecordSerialize
validates :trailers, json_schema: { filename: 'git_trailers' }
scope :with_users, -> { preload(:commit_author, :committer) }
# A list of keys of which their values need to be trimmed before they can be
# inserted into the merge_request_diff_commit_users table.
TRIM_USER_KEYS =
%i[author_name author_email committer_name committer_email].freeze
# Deprecated; use `bulk_insert!` from `BulkInsertSafe` mixin instead.
# cf. https://gitlab.com/gitlab-org/gitlab/issues/207989 for progress
def self.create_bulk(merge_request_diff_id, commits)
rows = commits.map.with_index do |commit, index|
# See #parent_ids.
commit_hash = commit.to_hash.except(:parent_ids)
commit_hashes, user_tuples = prepare_commits_for_bulk_insert(commits)
users = MergeRequest::DiffCommitUser.bulk_find_or_create(user_tuples)
rows = commit_hashes.map.with_index do |commit_hash, index|
sha = commit_hash.delete(:id)
author = users[[commit_hash[:author_name], commit_hash[:author_email]]]
committer =
users[[commit_hash[:committer_name], commit_hash[:committer_email]]]
commit_hash.merge(
commit_author_id: author&.id,
committer_id: committer&.id,
merge_request_diff_id: merge_request_diff_id,
relative_order: index,
sha: Gitlab::Database::ShaAttribute.serialize(sha), # rubocop:disable Cop/ActiveRecordSerialize
@ -36,6 +66,24 @@ class MergeRequestDiffCommit < ApplicationRecord
Gitlab::Database.bulk_insert(self.table_name, rows) # rubocop:disable Gitlab/BulkInsert
end
def self.prepare_commits_for_bulk_insert(commits)
user_tuples = Set.new
hashes = commits.map do |commit|
hash = commit.to_hash.except(:parent_ids)
TRIM_USER_KEYS.each do |key|
hash[key] = MergeRequest::DiffCommitUser.prepare(hash[key])
end
user_tuples << [hash[:author_name], hash[:author_email]]
user_tuples << [hash[:committer_name], hash[:committer_email]]
hash
end
[hashes, user_tuples]
end
def self.oldest_merge_request_id_per_commit(project_id, shas)
# This method is defined here and not on MergeRequest, otherwise the SHA
# values used in the WHERE below won't be encoded correctly.
@ -54,4 +102,20 @@ class MergeRequestDiffCommit < ApplicationRecord
)
.group(:sha)
end
def author_name
commit_author_id ? commit_author.name : super
end
def author_email
commit_author_id ? commit_author.email : super
end
def committer_name
committer_id ? committer.name : super
end
def committer_email
committer_id ? committer.email : super
end
end
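The four reader overrides above prefer the shared user row and fall back to the legacy denormalized columns via `super`; a hedged sketch (IDs illustrative):

```ruby
# Sketch only; IDs illustrative.
commit = MergeRequestDiffCommit.with_users.take

# With commit_author_id set, the name comes from the shared
# merge_request_diff_commit_users row:
commit.commit_author_id # => 42
commit.author_name      # => commit_author.name

# With commit_author_id NULL (rows not yet migrated), `super` returns the
# legacy denormalized author_name column on merge_request_diff_commits.
```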

View File

@ -9,6 +9,21 @@ module Analytics
expose :description
expose :id
expose :custom
# new API
expose :start_event do
expose :start_event_identifier, as: :identifier, if: -> (s) { s.custom? }
expose :start_event_label, as: :label, using: LabelEntity, if: -> (s) { s.start_event_label_based? }
expose :start_event_html_description, as: :html_description
end
expose :end_event do
expose :end_event_identifier, as: :identifier, if: -> (s) { s.custom? }
expose :end_event_label, as: :label, using: LabelEntity, if: -> (s) { s.end_event_label_based? }
expose :end_event_html_description, as: :html_description
end
# old API
expose :start_event_identifier, if: -> (s) { s.custom? }
expose :end_event_identifier, if: -> (s) { s.custom? }
expose :start_event_label, using: LabelEntity, if: -> (s) { s.start_event_label_based? }
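During the transition the entity serves both shapes side by side; an illustrative payload for a custom stage (values invented, label fields omitted):

```ruby
# Illustrative payload; values invented, label fields omitted.
{
  id: 7,
  custom: true,
  # new API: nested event objects
  start_event: { identifier: 'issue_created', html_description: '<p>...</p>' },
  end_event: { identifier: 'issue_closed', html_description: '<p>...</p>' },
  # old API: flat fields kept during the transition
  start_event_identifier: 'issue_created',
  end_event_identifier: 'issue_closed'
}
```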

View File

@ -81,11 +81,13 @@ module Projects
def parse_response_links(objects_response)
objects_response.each_with_object([]) do |entry, link_list|
link = entry.dig('actions', DOWNLOAD_ACTION, 'href')
headers = entry.dig('actions', DOWNLOAD_ACTION, 'header')
raise DownloadLinkNotFound unless link
link_list << LfsDownloadObject.new(oid: entry['oid'],
size: entry['size'],
headers: headers,
link: add_credentials(link))
rescue DownloadLinkNotFound, Addressable::URI::InvalidURIError
log_error("Link for Lfs Object with oid #{entry['oid']} not found or invalid.")

View File

@ -11,7 +11,7 @@ module Projects
LARGE_FILE_SIZE = 1.megabytes
attr_reader :lfs_download_object
delegate :oid, :size, :credentials, :sanitized_url, to: :lfs_download_object, prefix: :lfs
delegate :oid, :size, :credentials, :sanitized_url, :headers, to: :lfs_download_object, prefix: :lfs
def initialize(project, lfs_download_object)
super(project)
@ -71,17 +71,21 @@ module Projects
raise_oid_error! if digester.hexdigest != lfs_oid
end
def download_headers
{ stream_body: true }.tap do |headers|
def download_options
http_options = { headers: lfs_headers, stream_body: true }
return http_options if lfs_download_object.has_authorization_header?
http_options.tap do |options|
if lfs_credentials[:user].present? || lfs_credentials[:password].present?
# Using authentication headers in the request
headers[:basic_auth] = { username: lfs_credentials[:user], password: lfs_credentials[:password] }
options[:basic_auth] = { username: lfs_credentials[:user], password: lfs_credentials[:password] }
end
end
end
def fetch_file(&block)
response = Gitlab::HTTP.get(lfs_sanitized_url, download_headers, &block)
response = Gitlab::HTTP.get(lfs_sanitized_url, download_options, &block)
raise ResponseError, "Received error code #{response.code}" unless response.success?
end
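The resulting options differ only in how authentication is supplied; an illustrative sketch of the two shapes (credentials invented):

```ruby
# Illustrative shapes only; credentials invented.
# Authorization header supplied by the LFS batch response - used as-is:
with_lfs_header = { headers: { 'Authorization' => 'Bearer token' }, stream_body: true }

# No Authorization header - credentials from the sanitized URL become basic auth:
with_basic_auth = {
  headers: {},
  stream_body: true,
  basic_auth: { username: 'user', password: 'secret' }
}
```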

View File

@ -8,4 +8,4 @@
%li= token
%p
- pat_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: @target_url }
= html_escape(_('You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings')) % { pat_link_start: pat_link_start, pat_link_end: '</a>'.html_safe }
= html_escape(_('You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings.')) % { pat_link_start: pat_link_start, pat_link_end: '</a>'.html_safe }

View File

@ -6,4 +6,4 @@
- <%= token %>
<% end %>
<%= _('You can create a new one or check them in your personal access tokens settings %{pat_link}') % { pat_link: @target_url } %>
<%= _('You can create a new one or check them in your personal access tokens settings %{pat_link}.') % { pat_link: @target_url } %>

View File

@ -4,4 +4,4 @@
= _('One or more of your personal access tokens has expired.')
%p
- pat_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: @target_url }
= html_escape(_('You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings')) % { pat_link_start: pat_link_start, pat_link_end: '</a>'.html_safe }
= html_escape(_('You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings.')) % { pat_link_start: pat_link_start, pat_link_end: '</a>'.html_safe }

View File

@ -2,4 +2,4 @@
<%= _('One or more of your personal access tokens has expired.') %>
<%= _('You can create a new one or check them in your personal access tokens settings %{pat_link}') % { pat_link: @target_url } %>
<%= _('You can create a new one or check them in your personal access tokens settings %{pat_link}.') % { pat_link: @target_url } %>

View File

@ -34,31 +34,8 @@
= render_if_exists 'shared/issuable/sidebar_item_epic', issuable_sidebar: issuable_sidebar, group_path: @project.group.full_path, project_path: issuable_sidebar[:project_full_path], issue_iid: issuable_sidebar[:iid], issuable_type: issuable_type
- if issuable_sidebar[:supports_milestone]
- milestone = issuable_sidebar[:milestone] || {}
.block.milestone{ :class => ("gl-border-b-0!" if issuable_sidebar[:supports_iterations]), data: { qa_selector: 'milestone_block' } }
.sidebar-collapsed-icon.has-tooltip{ title: sidebar_milestone_tooltip_label(milestone), data: { container: 'body', html: 'true', placement: 'left', boundary: 'viewport' } }
= sprite_icon('clock')
%span.milestone-title.collapse-truncated-title
- if milestone.present?
= milestone[:title]
- else
= _('None')
.hide-collapsed.gl-line-height-20.gl-mb-2.gl-text-gray-900{ data: { testid: "milestone_title" } }
= _('Milestone')
= loading_icon(css_class: 'gl-vertical-align-text-bottom hidden block-loading')
- if can_edit_issuable
= link_to _('Edit'), '#', class: 'js-sidebar-dropdown-toggle edit-link float-right', data: { qa_selector: "edit_milestone_link", track_label: "right_sidebar", track_property: "milestone", track_event: "click_edit_button", track_value: "" }
.value.hide-collapsed
- if milestone.present?
- milestone_title = milestone[:expired] ? _("%{milestone_name} (Past due)").html_safe % { milestone_name: milestone[:title] } : milestone[:title]
= link_to milestone_title, milestone[:web_url], class: "bold has-tooltip", title: sidebar_milestone_remaining_days(milestone), data: { container: "body", html: 'true', boundary: 'viewport', qa_selector: 'milestone_link', qa_title: milestone[:title] }
- else
%span.no-value
= _('None')
.selectbox.hide-collapsed
= f.hidden_field 'milestone_id', value: milestone[:id], id: nil
= dropdown_tag('Milestone', options: { title: _('Assign milestone'), toggle_class: 'js-milestone-select js-extra-options', filter: true, dropdown_class: 'dropdown-menu-selectable', placeholder: _('Search milestones'), data: { show_no: true, field_name: "#{issuable_type}[milestone_id]", project_id: issuable_sidebar[:project_id], issuable_id: issuable_sidebar[:id], ability_name: issuable_type, issue_update: issuable_sidebar[:issuable_json_path], use_id: true, default_no: true, selected: milestone[:title], null_default: true, display: 'static' }})
.js-milestone-select{ data: { can_edit: can_edit_issuable.to_s, project_path: issuable_sidebar[:project_full_path], issue_iid: issuable_sidebar[:iid] } }
- if @project.group.present? && issuable_sidebar[:supports_iterations]
.block{ class: 'gl-pt-0!', data: { qa_selector: 'iteration_container' } }

View File

@ -0,0 +1,8 @@
---
name: api_caching_repository_compare
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/64418
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/334264
milestone: '14.1'
type: development
group: group::source code
default_enabled: false

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
class AddMergeRequestDiffCommitUsers < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
def up
create_table_with_constraints :merge_request_diff_commit_users, id: :bigint do |t|
t.text :name
t.text :email
t.text_limit :name, 512
t.text_limit :email, 512
t.index [:name, :email], unique: true
end
# Names or Emails can be optional, so in some cases one of these may be
# null. But if both are NULL/empty, no row should exist in this table.
add_check_constraint(
:merge_request_diff_commit_users,
"(COALESCE(name, '') != '') OR (COALESCE(email, '') != '')",
:merge_request_diff_commit_users_name_or_email_existence
)
end
def down
drop_table :merge_request_diff_commit_users
end
end
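The model validations added above mirror this constraint once `prepare` has normalized empty strings to `nil`; illustratively:

```ruby
# Illustrative; mirrors the name-or-email existence constraint.
MergeRequest::DiffCommitUser.new(name: 'Alice', email: nil).valid?             # => true
MergeRequest::DiffCommitUser.new(name: nil, email: 'alice@example.com').valid? # => true
MergeRequest::DiffCommitUser.new(name: nil, email: nil).valid?                 # => false
```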

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddMergeRequestDiffCommitUserColumns < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
def up
# NOTE: these columns are _not_ indexed, nor do they use foreign keys.
#
# This is deliberate, as creating these indexes on GitLab.com takes a _very_
# long time. In addition, there's no real need for them either based on how
# this data is used.
#
# For more information, refer to the following:
#
# - https://gitlab.com/gitlab-com/gl-infra/production/-/issues/5038#note_614592881
# - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63669
add_column(:merge_request_diff_commits, :commit_author_id, :bigint)
add_column(:merge_request_diff_commits, :committer_id, :bigint)
end
def down
remove_column(:merge_request_diff_commits, :commit_author_id)
remove_column(:merge_request_diff_commits, :committer_id)
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddNewUserSignupsCapToNamespaceSettings < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
def up
with_lock_retries do
add_column :namespace_settings, :new_user_signups_cap, :integer, null: true
end
end
def down
with_lock_retries do
remove_column :namespace_settings, :new_user_signups_cap
end
end
end

View File

@ -1,25 +1,10 @@
# frozen_string_literal: true
class ScheduleLatestPipelineIdPopulation < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
DELAY_INTERVAL = 2.minutes.to_i
BATCH_SIZE = 100
MIGRATION = 'PopulateLatestPipelineIds'
disable_ddl_transaction!
def up
return unless Gitlab.ee?
queue_background_migration_jobs_by_range_at_intervals(
Gitlab::BackgroundMigration::PopulateLatestPipelineIds::ProjectSetting.has_vulnerabilities_without_latest_pipeline_set,
MIGRATION,
DELAY_INTERVAL,
batch_size: BATCH_SIZE,
primary_column_name: 'project_id'
)
# no-op: This migration has been marked as no-op and replaced by
# `ReScheduleLatestPipelineIdPopulation` as we've found some problems.
# For more information: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65280
end
def down

View File

@ -0,0 +1,54 @@
# frozen_string_literal: true
class ScheduleMergeRequestDiffUsersBackgroundMigration < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
# The number of rows to process in a single migration job.
#
# The minimum interval for background migrations is two minutes. On staging we
# observed we can process roughly 20 000 rows in a minute. Based on the total
# number of rows on staging, this translates to a total processing time of
# roughly 14 days.
#
# By using a batch size of 40 000, we maintain a rate of roughly 20 000 rows
# per minute, hopefully keeping the total migration time under two weeks;
# instead of four weeks.
BATCH_SIZE = 40_000
MIGRATION_NAME = 'MigrateMergeRequestDiffCommitUsers'
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
end
def up
start = MergeRequestDiff.minimum(:id).to_i
max = MergeRequestDiff.maximum(:id).to_i
delay = BackgroundMigrationWorker.minimum_interval
# The table merge_request_diff_commits contains _a lot_ of rows (roughly 400
# 000 000 on staging). Iterating a table that large to determine job ranges
# would take a while.
#
# To avoid that overhead, we simply schedule fixed ranges according to the
# minimum and maximum IDs. The background migration in turn only processes
# rows that actually exist.
while start < max
stop = start + BATCH_SIZE
migrate_in(delay, MIGRATION_NAME, [start, stop])
Gitlab::Database::BackgroundMigrationJob
.create!(class_name: MIGRATION_NAME, arguments: [start, stop])
delay += BackgroundMigrationWorker.minimum_interval
start += BATCH_SIZE
end
end
def down
# no-op
end
end
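A quick back-of-the-envelope check of the figures quoted in the comments above (staging numbers, arithmetic only):

```ruby
# Arithmetic only, using the numbers quoted above.
batch_size   = 40_000                    # rows per job
interval     = 2                         # minimum minutes between jobs
rows_per_min = batch_size / interval     # => 20_000, the observed staging rate
total_rows   = 400_000_000               # rough staging row count
(total_rows / rows_per_min) / 60.0 / 24  # => ~13.9 days, i.e. "roughly 14 days"
```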

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
class ReScheduleLatestPipelineIdPopulation < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
DELAY_INTERVAL = 2.minutes.to_i
BATCH_SIZE = 100
MIGRATION = 'PopulateLatestPipelineIds'
disable_ddl_transaction!
def up
return unless Gitlab.ee?
queue_background_migration_jobs_by_range_at_intervals(
Gitlab::BackgroundMigration::PopulateLatestPipelineIds::ProjectSetting.has_vulnerabilities_without_latest_pipeline_set,
MIGRATION,
DELAY_INTERVAL,
batch_size: BATCH_SIZE,
primary_column_name: 'project_id'
)
end
def down
# no-op
end
end

View File

@ -0,0 +1 @@
42b3090efee66f5a7a5c06d8768d1417892c5d6745f60163a09f58e6e3722761

View File

@ -0,0 +1 @@
aa04d433e400ed3ec11e5d40ada72f122b1d8b7a82f8803d9206da5c94ec5ef9

View File

@ -0,0 +1 @@
0c01bb41113c468a602649b591e1fd2959a6e3190c835ef2e27351cf69f50fd5

View File

@ -0,0 +1 @@
c66a42fc813846a09d4389a895a2d20ad48889d8ff45ab642e771b6792490623

View File

@ -0,0 +1 @@
ed0daff7120cbdba2f0e9ca1f2e40c11114bb2c7db4543903d16891ffbbba3f8

View File

@ -14760,6 +14760,24 @@ CREATE SEQUENCE merge_request_context_commits_id_seq
ALTER SEQUENCE merge_request_context_commits_id_seq OWNED BY merge_request_context_commits.id;
CREATE TABLE merge_request_diff_commit_users (
id bigint NOT NULL,
name text,
email text,
CONSTRAINT check_147358fc42 CHECK ((char_length(name) <= 512)),
CONSTRAINT check_f5fa206cf7 CHECK ((char_length(email) <= 512)),
CONSTRAINT merge_request_diff_commit_users_name_or_email_existence CHECK (((COALESCE(name, ''::text) <> ''::text) OR (COALESCE(email, ''::text) <> ''::text)))
);
CREATE SEQUENCE merge_request_diff_commit_users_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE merge_request_diff_commit_users_id_seq OWNED BY merge_request_diff_commit_users.id;
CREATE TABLE merge_request_diff_commits (
authored_date timestamp without time zone,
committed_date timestamp without time zone,
@ -14771,7 +14789,9 @@ CREATE TABLE merge_request_diff_commits (
committer_name text,
committer_email text,
message text,
trailers jsonb DEFAULT '{}'::jsonb NOT NULL
trailers jsonb DEFAULT '{}'::jsonb NOT NULL,
commit_author_id bigint,
committer_id bigint
);
CREATE TABLE merge_request_diff_details (
@ -15142,6 +15162,7 @@ CREATE TABLE namespace_settings (
resource_access_token_creation_allowed boolean DEFAULT true NOT NULL,
lock_delayed_project_removal boolean DEFAULT false NOT NULL,
prevent_sharing_groups_outside_hierarchy boolean DEFAULT false NOT NULL,
new_user_signups_cap integer,
CONSTRAINT check_0ba93c78c7 CHECK ((char_length(default_branch_name) <= 255))
);
@ -20160,6 +20181,8 @@ ALTER TABLE ONLY merge_request_cleanup_schedules ALTER COLUMN merge_request_id S
ALTER TABLE ONLY merge_request_context_commits ALTER COLUMN id SET DEFAULT nextval('merge_request_context_commits_id_seq'::regclass);
ALTER TABLE ONLY merge_request_diff_commit_users ALTER COLUMN id SET DEFAULT nextval('merge_request_diff_commit_users_id_seq'::regclass);
ALTER TABLE ONLY merge_request_diff_details ALTER COLUMN merge_request_diff_id SET DEFAULT nextval('merge_request_diff_details_merge_request_diff_id_seq'::regclass);
ALTER TABLE ONLY merge_request_diffs ALTER COLUMN id SET DEFAULT nextval('merge_request_diffs_id_seq'::regclass);
@ -21600,6 +21623,9 @@ ALTER TABLE ONLY merge_request_context_commit_diff_files
ALTER TABLE ONLY merge_request_context_commits
ADD CONSTRAINT merge_request_context_commits_pkey PRIMARY KEY (id);
ALTER TABLE ONLY merge_request_diff_commit_users
ADD CONSTRAINT merge_request_diff_commit_users_pkey PRIMARY KEY (id);
ALTER TABLE ONLY merge_request_diff_commits
ADD CONSTRAINT merge_request_diff_commits_pkey PRIMARY KEY (merge_request_diff_id, relative_order);
@ -23903,6 +23929,8 @@ CREATE INDEX index_merge_request_blocks_on_blocked_merge_request_id ON merge_req
CREATE UNIQUE INDEX index_merge_request_cleanup_schedules_on_merge_request_id ON merge_request_cleanup_schedules USING btree (merge_request_id);
CREATE UNIQUE INDEX index_merge_request_diff_commit_users_on_name_and_email ON merge_request_diff_commit_users USING btree (name, email);
CREATE INDEX index_merge_request_diff_commits_on_sha ON merge_request_diff_commits USING btree (sha);
CREATE INDEX index_merge_request_diff_details_failed_verification ON merge_request_diff_details USING btree (verification_retry_at NULLS FIRST) WHERE (verification_state = 3);

View File

@ -1017,6 +1017,47 @@ postgresql['trust_auth_cidr_addresses'] = %w(123.123.123.123/32 <other_cidrs>)
[Reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
### Reset the Patroni state in Consul
WARNING:
This is a destructive process and may lead the cluster into a bad state. Make sure that you have a healthy backup before running this process.
As a last resort, if your Patroni cluster is in an unknown/bad state and no node can start, you can
reset the Patroni state in Consul completely, resulting in a reinitialized Patroni cluster when
the first Patroni node starts.
To reset the Patroni state in Consul:
1. Take note of the Patroni node that was the leader (or the node the application thinks is the current leader, if the current state shows more than one leader or none). One way to do this is to look on the PgBouncer nodes in `/var/opt/gitlab/consul/databases.ini`, which contains the hostname of the current leader.
1. Stop Patroni on all nodes:
```shell
sudo gitlab-ctl stop patroni
```
1. Reset the state in Consul:
```shell
/opt/gitlab/embedded/bin/consul kv delete -recurse /service/postgresql-ha/
```
1. Start one Patroni node, which will initialize the Patroni cluster and be elected as a leader.
It's highly recommended to start the previous leader (noted in the first step)
so as not to lose existing writes that may not have been replicated because
of the broken cluster state:
```shell
sudo gitlab-ctl start patroni
```
1. Start all other Patroni nodes that will join the Patroni cluster as replicas:
```shell
sudo gitlab-ctl start patroni
```
If you are still seeing issues, the next step is restoring the last healthy backup.
### Issues with other components
If you're running into an issue with a component not outlined here, be sure to check the troubleshooting section of their specific documentation page:

Binary file not shown (image added; 162 KiB)

Binary file not shown (image added; 108 KiB)

View File

@ -42,13 +42,8 @@ We're currently displaying the information in 2 formats:
1. Budget Spent: This shows the time over the past 28 days that
features owned by the group have not been performing adequately.
We're still discussing which of these is more understandable; please
contribute in
[Scalability issue #946](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/946)
if you have thoughts on this topic.
The budget is calculated based on indicators per component. Each
component has 2 indicators:
component can have 2 indicators:
1. [Apdex](https://en.wikipedia.org/wiki/Apdex): The rate of
operations that performed adequately.
@ -80,14 +75,44 @@ The calculation to a ratio then happens as follows:
\frac {operations\_meeting\_apdex + (total\_operations - operations\_with\_errors)} {total\_apdex\_measurements + total\_operations}
```
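For example, with illustrative numbers: 980 of 1,000 apdex measurements meeting their target and 5 errors across 1,000 operations give:

```math
\frac{980 + (1000 - 5)}{1000 + 1000} = \frac{1975}{2000} = 98.75\%
```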
*Caveat:* Not all components are included, causing the
calculation to be less accurate for some groups. We're working on
adding all components in
[&437](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/437). This
could cause the dashboard to display "No Data" for features with lower
traffic.
### Check where budget is being spent
## Usage
The row below the error budget row is collapsed by default. Expanding
it shows which component and violation type had the most offending
operations in the past 28 days.
![Error attribution](img/stage_group_dashboards_error_attribution.png)
The first panel on the left shows a table with the number of errors per
component. Digging into the first row in that table is going to have
the biggest impact on the budget spent.
Commonly, the components spending most of the budget are Sidekiq or Puma. The panel in
the center explains what these violation types mean, and how to dig
deeper in the logs.
The panel on the right provides links to Kibana that should reveal
which endpoints or Sidekiq jobs are causing the errors.
To learn how to use these panels and logs for
determining which Rails endpoints are slow,
see the [Error Budget Attribution for Purchase group](https://youtu.be/M9u6unON7bU) video.
Other components visible in the table come from
[service level indicators](https://sre.google/sre-book/service-level-objectives/) (SLIs) defined
in the [metrics
catalog](https://gitlab.com/gitlab-com/runbooks/-/blob/master/metrics-catalog/README.md).
For those types of failures, you can follow the link to the service
dashboard linked from the `type` column. The service dashboard
contains a row specifically for the SLI that is causing the budget
spent, with useful links to the logs and a description of what the
component means. For example, see the `server` component of the
`web-pages` service:
![web-pages-server-component SLI](img/stage_group_dashboards_service_sli_detail.png)
## Usage of the dashboard
Inside a stage group dashboard, there are some notable components. Let's take the [Source Code group's dashboard](https://dashboards.gitlab.net/d/stage-groups-source_code/stage-groups-group-dashboard-create-source-code?orgId=1) as an example.

View File

@ -201,29 +201,29 @@ upgrade paths.
| Target version | Your version | Supported upgrade path | Note |
| --------------------- | ------------ | ------------------------ | ---- |
| `13.5.4` | `12.9.2` | `12.9.2` -> `12.10.14` -> `13.0.14` -> `13.1.11` -> `13.5.4` | Three intermediate versions are required: the final `12.10` release, plus `13.0` and `13.1`. |
| `13.2.10` | `11.5.0` | `11.5.0` -> `11.11.8` -> `12.0.12` -> `12.1.17` -> `12.10.14` -> `13.0.14` -> `13.1.11` -> `13.2.10` | Six intermediate versions are required: the final `11.11`, `12.0`, `12.1`, `12.10`, `13.0` releases, plus `13.1`. |
| `12.10.14` | `11.3.4` | `11.3.4` -> `11.11.8` -> `12.0.12` -> `12.1.17` -> `12.10.14` | Three intermediate versions are required: the final `11.11` and `12.0` releases, plus `12.1` |
| `12.9.5` | `10.4.5` | `10.4.5` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.1.17` -> `12.9.5` | Four intermediate versions are required: `10.8`, `11.11`, `12.0` and `12.1`, then `12.9.5` |
| `12.2.5` | `9.2.6` | `9.2.6` -> `9.5.10` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.1.17` -> `12.2.5` | Five intermediate versions are required: `9.5`, `10.8`, `11.11`, `12.0`, `12.1`, then `12.2`. |
| `14.1.0` | `13.9.2` | `13.9.2` -> `13.12.6` -> `14.0.3` -> `14.1.0` | Two intermediate versions are required: `13.12` and `14.0`, then `14.1.0`. |
| `13.5.4` | `12.9.2` | `12.9.2` -> `12.10.14` -> `13.0.14` -> `13.1.11` -> `13.5.4` | Three intermediate versions are required: `12.10`, `13.0` and `13.1`, then `13.5.4`. |
| `13.2.10` | `11.5.0` | `11.5.0` -> `11.11.8` -> `12.0.12` -> `12.1.17` -> `12.10.14` -> `13.0.14` -> `13.1.11` -> `13.2.10` | Six intermediate versions are required: `11.11`, `12.0`, `12.1`, `12.10`, `13.0` and `13.1`, then `13.2.10`. |
| `12.10.14` | `11.3.4` | `11.3.4` -> `11.11.8` -> `12.0.12` -> `12.1.17` -> `12.10.14` | Three intermediate versions are required: `11.11`, `12.0` and `12.1`, then `12.10.14`. |
| `12.9.5` | `10.4.5` | `10.4.5` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.1.17` -> `12.9.5` | Four intermediate versions are required: `10.8`, `11.11`, `12.0` and `12.1`, then `12.9.5`. |
| `12.2.5` | `9.2.6` | `9.2.6` -> `9.5.10` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.1.17` -> `12.2.5` | Five intermediate versions are required: `9.5`, `10.8`, `11.11`, `12.0`, `12.1`, then `12.2.5`. |
| `11.3.4` | `8.13.4` | `8.13.4` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> `11.3.4` | `8.17.7` is the last version in version 8, `9.5.10` is the last version in version 9, `10.8.7` is the last version in version 10. |
## Upgrading to a new major version
Upgrading the *major* version requires more attention.
Backward-incompatible changes and migrations are reserved for major versions.
We cannot guarantee that upgrading between major versions will be seamless.
It is required to upgrade to the latest available *minor* version within
your major version before proceeding to the next major version.
Doing this addresses any backward-incompatible changes or deprecations
to help ensure a successful upgrade to the next major release.
Identify a [supported upgrade path](#upgrade-paths).
Follow the directions carefully as we
cannot guarantee that upgrading between major versions will be seamless.
More significant migrations may occur during major release upgrades. To ensure these are successful:
The following upgrade steps are required for a successful *major* version upgrade:
1. Increment to the first minor version (`X.0.Z`) during the major version jump.
1. Upgrade to the latest minor version of the preceding major version.
1. Upgrade to the first minor version (`X.0.Z`) of the target major version.
1. Proceed with upgrading to a newer release.
Identify a [supported upgrade path](#upgrade-paths).
It's also important to ensure that any background migrations have been fully completed
before upgrading to a new major version. To see the current size of the `background_migration` queue,
[check for background migrations before upgrading](#checking-for-background-migrations-before-upgrading).

View File

@ -563,7 +563,7 @@ You can create a cleanup policy in [the API](#use-the-cleanup-policy-api) or the
To create a cleanup policy in the UI:
1. For your project, go to **Settings > CI/CD**.
1. For your project, go to **Settings > Packages & Registries**.
1. Expand the **Clean up image tags** section.
1. Complete the fields.

View File

@ -138,7 +138,11 @@ module API
compare = CompareService.new(user_project, params[:to]).execute(target_project, params[:from], straight: params[:straight])
if compare
present compare, with: Entities::Compare
if Feature.enabled?(:api_caching_repository_compare, user_project, default_enabled: :yaml)
present_cached compare, with: Entities::Compare, expires_in: 1.day, cache_context: nil
else
present compare, with: Entities::Compare
end
else
not_found!("Ref")
end
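When the flag is enabled, the payload is rendered through `present_cached`, presumably keyed on the `Compare#cache_key` added in this commit; a minimal sketch:

```ruby
# Sketch only. The compare object exposes the cache key added above:
compare = CompareService.new(project, 'feature').execute(project, 'main')
compare.cache_key # => [project, :compare, diff_refs.hash]
# Identical keys within the one-day window are served from the cache
# instead of re-rendering Entities::Compare.
```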

View File

@ -0,0 +1,289 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Migrates author and committer names and emails from
# merge_request_diff_commits to two columns that point to
# merge_request_diff_commit_users.
#
# rubocop: disable Metrics/ClassLength
class MigrateMergeRequestDiffCommitUsers
# The number of user rows in merge_request_diff_commit_users to get in a
# single query.
USER_ROWS_PER_QUERY = 1_000
# The number of rows in merge_request_diff_commits to get in a single
# query.
COMMIT_ROWS_PER_QUERY = 10_000
# The number of rows in merge_request_diff_commits to update in a single
# query.
#
# Tests in staging revealed that increasing the number of updates per
# query translates to a longer total runtime for a migration. For example,
# given the same range of rows to migrate, 1000 updates per query required
# a total of roughly 15 seconds. On the other hand, 5000 updates per query
# required a total of roughly 25 seconds. For this reason, we use a value
# of 1000 rows per update.
UPDATES_PER_QUERY = 1_000
# rubocop: disable Style/Documentation
class MergeRequestDiffCommit < ActiveRecord::Base
include FromUnion
extend ::SuppressCompositePrimaryKeyWarning
self.table_name = 'merge_request_diff_commits'
# Yields each row to migrate in the given range.
#
# This method uses keyset pagination to ensure we don't retrieve
# potentially tens of thousands (or even hundreds of thousands) of rows
# in a single query. Such queries could time out, or increase the amount
# of memory needed to process the data.
#
# We can't use `EachBatch` and similar approaches, as
# merge_request_diff_commits doesn't have a single monotonically
# increasing primary key.
def self.each_row_to_migrate(start_id, stop_id, &block)
order = Pagination::Keyset::Order.build(
%w[merge_request_diff_id relative_order].map do |col|
Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: col,
order_expression: self.arel_table[col.to_sym].asc,
nullable: :not_nullable,
distinct: false
)
end
)
scope = MergeRequestDiffCommit
.where(merge_request_diff_id: start_id...stop_id)
.order(order)
Pagination::Keyset::Iterator
.new(scope: scope, use_union_optimization: true)
.each_batch(of: COMMIT_ROWS_PER_QUERY) { |rows| rows.each(&block) }
end
end
# rubocop: enable Style/Documentation
# rubocop: disable Style/Documentation
class MergeRequestDiffCommitUser < ActiveRecord::Base
self.table_name = 'merge_request_diff_commit_users'
def self.union(queries)
from("(#{queries.join("\nUNION ALL\n")}) #{table_name}")
end
end
# rubocop: enable Style/Documentation
def perform(start_id, stop_id)
# This Hash maps user names + emails to their corresponding rows in
# merge_request_diff_commit_users.
user_mapping = {}
user_details, diff_rows_to_update = get_data_to_update(start_id, stop_id)
get_user_rows_in_batches(user_details, user_mapping)
create_missing_users(user_details, user_mapping)
update_commit_rows(diff_rows_to_update, user_mapping)
Database::BackgroundMigrationJob.mark_all_as_succeeded(
'MigrateMergeRequestDiffCommitUsers',
[start_id, stop_id]
)
end
# Returns the data we'll use to determine what merge_request_diff_commits
# rows to update, and what data to use for populating their
# commit_author_id and committer_id columns.
def get_data_to_update(start_id, stop_id)
# This Set is used to retrieve users that already exist in
# merge_request_diff_commit_users.
users = Set.new
# This Hash maps the primary key of every row in
# merge_request_diff_commits to the (trimmed) author and committer
# details to use for updating the row.
to_update = {}
MergeRequestDiffCommit.each_row_to_migrate(start_id, stop_id) do |row|
author = [prepare(row.author_name), prepare(row.author_email)]
committer = [prepare(row.committer_name), prepare(row.committer_email)]
to_update[[row.merge_request_diff_id, row.relative_order]] =
[author, committer]
users << author if author[0] || author[1]
users << committer if committer[0] || committer[1]
end
[users, to_update]
end
# Gets any existing rows in merge_request_diff_commit_users in batches.
#
# This method may end up having to retrieve lots of rows. To reduce the
# overhead, we batch queries into a UNION query. We limit the number of
# queries per UNION so we don't end up sending a single query containing
# too many SELECT statements.
def get_user_rows_in_batches(users, user_mapping)
users.each_slice(USER_ROWS_PER_QUERY) do |pairs|
queries = pairs.map do |(name, email)|
MergeRequestDiffCommitUser.where(name: name, email: email).to_sql
end
MergeRequestDiffCommitUser.union(queries).each do |row|
user_mapping[[row.name.to_s, row.email.to_s]] = row
end
end
end
# Creates any users for which no row exists in
# merge_request_diff_commit_users.
#
# Not all users queried may exist yet, so we need to create any missing
# ones, making sure we handle concurrent creations of the same user.
def create_missing_users(users, mapping)
create = []
users.each do |(name, email)|
create << { name: name, email: email } unless mapping[[name, email]]
end
return if create.empty?
MergeRequestDiffCommitUser
.insert_all(create, returning: %w[id name email])
.each do |row|
mapping[[row['name'], row['email']]] = MergeRequestDiffCommitUser
.new(id: row['id'], name: row['name'], email: row['email'])
end
# It's possible for (name, email) pairs to be inserted concurrently,
# resulting in the above insert not returning anything. Here we get any
# remaining users that were created concurrently.
get_user_rows_in_batches(
users.reject { |pair| mapping.key?(pair) },
mapping
)
end
# Updates rows in merge_request_diff_commits with their new
# commit_author_id and committer_id values.
def update_commit_rows(to_update, user_mapping)
MergeRequestDiffCommitUser.transaction do
to_update.each_slice(UPDATES_PER_QUERY) do |slice|
updates = {}
slice.each do |(diff_id, order), (author, committer)|
author_id = user_mapping[author]&.id
committer_id = user_mapping[committer]&.id
updates[[diff_id, order]] = [author_id, committer_id]
end
bulk_update_commit_rows(updates)
end
end
end
# Bulk updates rows in the merge_request_diff_commits table with their new
# author and/or committer ID values.
#
# Updates are batched together to reduce the overhead of having to produce
# a single UPDATE for every row, as we may end up having to update
# thousands of rows at once.
#
# The query produced by this method is along the lines of the following:
#
# UPDATE merge_request_diff_commits
# SET commit_author_id =
# CASE
# WHEN (merge_request_diff_id, relative_order) = (x, y) THEN X
# WHEN ...
# END,
# committer_id =
# CASE
# WHEN (merge_request_diff_id, relative_order) = (x, y) THEN Y
# WHEN ...
# END
# WHERE (merge_request_diff_id, relative_order) IN ( (x, y), ... )
#
# The `mapping` argument is a Hash in the following format:
#
# { [merge_request_diff_id, relative_order] => [author_id, committer_id] }
#
# rubocop: disable Metrics/AbcSize
def bulk_update_commit_rows(mapping)
author_case = Arel::Nodes::Case.new
committer_case = Arel::Nodes::Case.new
primary_values = []
mapping.each do |diff_id_and_order, (author_id, committer_id)|
primary_value = Arel::Nodes::Grouping.new(diff_id_and_order)
primary_values << primary_value
if author_id
author_case.when(primary_key.eq(primary_value)).then(author_id)
end
if committer_id
committer_case.when(primary_key.eq(primary_value)).then(committer_id)
end
end
if author_case.conditions.empty? && committer_case.conditions.empty?
return
end
fields = []
# Statements such as `SET x = CASE END` are not valid SQL statements, so
# we omit setting an ID field if there are no values to populate it
# with.
if author_case.conditions.any?
fields << [arel_table[:commit_author_id], author_case]
end
if committer_case.conditions.any?
fields << [arel_table[:committer_id], committer_case]
end
query = Arel::UpdateManager.new
.table(arel_table)
.where(primary_key.in(primary_values))
.set(fields)
.to_sql
MergeRequestDiffCommit.connection.execute(query)
end
# rubocop: enable Metrics/AbcSize
def primary_key
Arel::Nodes::Grouping.new(
[arel_table[:merge_request_diff_id], arel_table[:relative_order]]
)
end
def arel_table
MergeRequestDiffCommit.arel_table
end
# Prepares a value to be inserted into a column in the table
# `merge_request_diff_commit_users`. Values in this table are limited to
# 512 characters.
#
# We treat empty strings as NULL values, as there's no point in (for
# example) storing a row where both the name and Email are an empty
# string. In addition, if we treated them differently we could end up with
# two rows: one where field X is NULL, and one where field X is an empty
# string. This is redundant, so we avoid storing such data.
def prepare(value)
value.present? ? value[0..511] : nil
end
end
# rubocop: enable Metrics/ClassLength
end
end
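A hypothetical sketch of processing one scheduled range by hand; the `(start_id, stop_id)` pair matches a row created by the scheduling migration:

```ruby
# Hypothetical: run one job range by hand. On success the corresponding
# BackgroundMigrationJob row is marked as succeeded.
Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers
  .new
  .perform(1, 40_001) # one BATCH_SIZE-wide range of merge_request_diff_ids
```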

View File

@ -61,7 +61,9 @@ tree:
- :push_event_payload
- :suggestions
- merge_request_diff:
- :merge_request_diff_commits
- merge_request_diff_commits:
- :commit_author
- :committer
- :merge_request_diff_files
- events:
- :push_event_payload
@ -201,6 +203,10 @@ excluded_attributes:
- :verification_failure
merge_request_diff_commits:
- :merge_request_diff_id
- :commit_author_id
- :committer_id
merge_request_diff_commit_user:
- :id
merge_request_diff_detail:
- :merge_request_diff_id
- :verification_retry_at

View File

@ -28,6 +28,7 @@ module Gitlab
def find
return if epic? && group.nil?
return find_diff_commit_user if diff_commit_user?
super
end
@ -81,6 +82,13 @@ module Gitlab
end
end
def find_diff_commit_user
find_with_cache do
MergeRequest::DiffCommitUser
.find_or_create(@attributes['name'], @attributes['email'])
end
end
def label?
klass == Label
end
@ -101,6 +109,10 @@ module Gitlab
klass == DesignManagement::Design
end
def diff_commit_user?
klass == MergeRequest::DiffCommitUser
end
# If an existing group milestone used the IID
# claim the IID back and set the group milestone to use one available
# This is necessary to fix situations like the following:

View File

@ -31,7 +31,9 @@ module Gitlab
ci_cd_settings: 'ProjectCiCdSetting',
error_tracking_setting: 'ErrorTracking::ProjectErrorTrackingSetting',
links: 'Releases::Link',
metrics_setting: 'ProjectMetricsSetting' }.freeze
metrics_setting: 'ProjectMetricsSetting',
commit_author: 'MergeRequest::DiffCommitUser',
committer: 'MergeRequest::DiffCommitUser' }.freeze
BUILD_MODELS = %i[Ci::Build commit_status].freeze
@ -56,6 +58,7 @@ module Gitlab
external_pull_request
external_pull_requests
DesignManagement::Design
MergeRequest::DiffCommitUser
].freeze
def create

View File

@ -708,9 +708,6 @@ msgstr ""
msgid "%{message} showing first %{warnings_displayed}"
msgstr ""
msgid "%{milestone_name} (Past due)"
msgstr ""
msgid "%{milestone} (expired)"
msgstr ""
@ -1097,6 +1094,9 @@ msgstr ""
msgid "(deleted)"
msgstr ""
msgid "(expired)"
msgstr ""
msgid "(leave blank if you don't want to change it)"
msgstr ""
@ -37280,7 +37280,7 @@ msgstr ""
msgid "You can create a new SSH key by visiting %{link}"
msgstr ""
msgid "You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings"
msgid "You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings."
msgstr ""
msgid "You can create a new one or check them in your %{ssh_key_link_start}SSH keys%{ssh_key_link_end} settings."
@ -37289,7 +37289,7 @@ msgstr ""
msgid "You can create a new one or check them in your SSH keys settings %{ssh_key_link}."
msgstr ""
msgid "You can create a new one or check them in your personal access tokens settings %{pat_link}"
msgid "You can create a new one or check them in your personal access tokens settings %{pat_link}."
msgstr ""
msgid "You can create new ones at your %{pat_link_start}Personal Access Tokens%{pat_link_end} settings"

View File

@ -40,16 +40,22 @@ module QA
base.view 'app/views/shared/issuable/_sidebar.html.haml' do
element :assignee_block
element :edit_milestone_link
element :milestone_block
element :milestone_link
end
base.view 'app/assets/javascripts/sidebar/components/sidebar_dropdown_widget.vue' do
element :milestone_link, 'data-qa-selector="`${issuableAttribute}_link`"' # rubocop:disable QA/ElementWithPattern
end
base.view 'app/assets/javascripts/sidebar/components/sidebar_editable_item.vue' do
element :edit_link
end
end
def assign_milestone(milestone)
click_element(:edit_milestone_link)
within_element(:milestone_block) do
click_link("#{milestone.title}")
click_element(:edit_link)
click_on(milestone.title)
end
wait_until(reload: false) do
@ -89,7 +95,7 @@ module QA
def has_milestone?(milestone_title)
wait_milestone_block_finish_loading do
has_element?(:milestone_link, title: milestone_title)
has_element?(:milestone_link, text: milestone_title)
end
end

View File

@ -56,6 +56,7 @@ RSpec.describe 'Database schema' do
ldap_group_links: %w[group_id],
members: %w[source_id created_by_id],
merge_requests: %w[last_edited_by_id state_id],
merge_request_diff_commits: %w[commit_author_id committer_id],
namespaces: %w[owner_id parent_id],
notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id confirmed_by_id discussion_id],
notification_settings: %w[source_id],

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
FactoryBot.define do
factory :merge_request_diff_commit_user, class: 'MergeRequest::DiffCommitUser' do
name { generate(:name) }
email { generate(:email) }
end
end

View File

@ -259,37 +259,35 @@ RSpec.describe 'Issue Sidebar' do
end
context 'editing issue milestone', :js do
let_it_be(:milestone_expired) { create(:milestone, project: project, due_date: 5.days.ago) }
let_it_be(:milestone_expired) { create(:milestone, project: project, title: 'Foo - expired', due_date: 5.days.ago) }
let_it_be(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') }
let_it_be(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) }
let_it_be(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) }
let_it_be(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) }
before do
page.within('[data-testid="milestone_title"]') do
click_on 'Edit'
page.within('.block.milestone') do
click_button 'Edit'
end
wait_for_all_requests
end
it 'shows milestons list in the dropdown' do
page.within('.block.milestone .dropdown-content') do
it 'shows milestones list in the dropdown' do
page.within('.block.milestone') do
# 5 milestones + "No milestone" = 6 items
expect(page.find('ul')).to have_selector('li[data-milestone-id]', count: 6)
expect(page.find('.gl-new-dropdown-contents')).to have_selector('li.gl-new-dropdown-item', count: 6)
end
end
it 'shows expired milestone at the bottom of the list' do
page.within('.block.milestone .dropdown-content ul') do
it 'shows expired milestone at the bottom of the list and milestone due earliest at the top of the list', :aggregate_failures do
page.within('.block.milestone .gl-new-dropdown-contents') do
expect(page.find('li:last-child')).to have_content milestone_expired.title
end
end
it 'shows milestone due earliest at the top of the list' do
page.within('.block.milestone .dropdown-content ul') do
expect(page.all('li[data-milestone-id]')[1]).to have_content milestone3.title
expect(page.all('li[data-milestone-id]')[2]).to have_content milestone2.title
expect(page.all('li[data-milestone-id]')[3]).to have_content milestone1.title
expect(page.all('li[data-milestone-id]')[4]).to have_content milestone_no_duedate.title
expect(page.all('li.gl-new-dropdown-item')[1]).to have_content milestone3.title
expect(page.all('li.gl-new-dropdown-item')[2]).to have_content milestone2.title
expect(page.all('li.gl-new-dropdown-item')[3]).to have_content milestone1.title
expect(page.all('li.gl-new-dropdown-item')[4]).to have_content milestone_no_duedate.title
end
end
end

View File

@ -333,37 +333,40 @@ RSpec.describe "Issues > User edits issue", :js do
describe 'update milestone' do
context 'by authorized user' do
it 'allows user to select unassigned' do
it 'allows user to select no milestone' do
visit project_issue_path(project, issue)
wait_for_requests
page.within('.milestone') do
expect(page).to have_content "None"
end
page.within('.block.milestone') do
expect(page).to have_content 'None'
click_button 'Edit'
wait_for_requests
click_button 'No milestone'
wait_for_requests
find('.block.milestone .edit-link').click
sleep 2 # wait for ajax stuff to complete
first('.dropdown-content li').click
sleep 2
page.within('.milestone') do
expect(page).to have_content 'None'
end
end
it 'allows user to de-select milestone' do
visit project_issue_path(project, issue)
wait_for_requests
page.within('.milestone') do
click_link 'Edit'
click_link milestone.title
click_button 'Edit'
wait_for_requests
click_button milestone.title
page.within '.value' do
page.within '[data-testid="select-milestone"]' do
expect(page).to have_content milestone.title
end
click_link 'Edit'
click_link milestone.title
click_button 'Edit'
wait_for_requests
click_button 'No milestone'
page.within '.value' do
page.within '[data-testid="select-milestone"]' do
expect(page).to have_content 'None'
end
end
@ -371,16 +374,17 @@ RSpec.describe "Issues > User edits issue", :js do
it 'allows user to search milestone' do
visit project_issue_path(project_with_milestones, issue_with_milestones)
wait_for_requests
page.within('.milestone') do
click_link 'Edit'
click_button 'Edit'
wait_for_requests
# We need to enclose the search string in quotes for an exact match, as all
# the milestone titles within tests are prefixed with `My title`.
find('.dropdown-input-field', visible: true).send_keys "\"#{milestones[0].title}\""
find('.gl-form-input', visible: true).send_keys "\"#{milestones[0].title}\""
wait_for_requests
page.within '.dropdown-content' do
page.within '.gl-new-dropdown-contents' do
expect(page).to have_content milestones[0].title
end
end

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@ -530,6 +530,7 @@ export const mockMilestone1 = {
title: 'Foobar Milestone',
webUrl: 'http://gdk.test:3000/groups/gitlab-org/-/milestones/1',
state: 'active',
expired: false,
};
export const mockMilestone2 = {
@ -538,6 +539,7 @@ export const mockMilestone2 = {
title: 'Awesome Milestone',
webUrl: 'http://gdk.test:3000/groups/gitlab-org/-/milestones/2',
state: 'active',
expired: false,
};
export const mockProjectMilestonesResponse = {
@ -571,6 +573,7 @@ export const mockMilestoneMutationResponse = {
id: 'gid://gitlab/Milestone/2',
title: 'Awesome Milestone',
state: 'active',
expired: false,
__typename: 'Milestone',
},
__typename: 'Issue',

View File

@ -0,0 +1,400 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
let(:merge_requests) { table(:merge_requests) }
let(:diffs) { table(:merge_request_diffs) }
let(:commits) do
table(:merge_request_diff_commits).tap do |t|
t.extend(SuppressCompositePrimaryKeyWarning)
end
end
let(:commit_users) { described_class::MergeRequestDiffCommitUser }
let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
let(:project) { projects.create!(namespace_id: namespace.id) }
let(:merge_request) do
merge_requests.create!(
source_branch: 'x',
target_branch: 'master',
target_project_id: project.id
)
end
let(:diff) { diffs.create!(merge_request_id: merge_request.id) }
let(:migration) { described_class.new }
describe 'MergeRequestDiffCommit' do
describe '.each_row_to_migrate' do
it 'yields the rows to migrate for a given range' do
commit1 = commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
author_name: 'bob',
author_email: 'bob@example.com',
committer_name: 'bob',
committer_email: 'bob@example.com'
)
commit2 = commits.create!(
merge_request_diff_id: diff.id,
relative_order: 1,
sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
author_name: 'Alice',
author_email: 'alice@example.com',
committer_name: 'Alice',
committer_email: 'alice@example.com'
)
# We stub this constant to make sure we run at least two pagination
# queries for getting the data. This way we can test if the pagination
# is actually working properly.
stub_const(
'Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers::COMMIT_ROWS_PER_QUERY',
1
)
rows = []
described_class::MergeRequestDiffCommit.each_row_to_migrate(diff.id, diff.id + 1) do |row|
rows << row
end
expect(rows.length).to eq(2)
expect(rows[0].author_name).to eq(commit1.author_name)
expect(rows[1].author_name).to eq(commit2.author_name)
end
end
end
describe 'MergeRequestDiffCommitUser' do
describe '.union' do
it 'produces a union of the given queries' do
alice = commit_users.create!(name: 'Alice', email: 'alice@example.com')
bob = commit_users.create!(name: 'Bob', email: 'bob@example.com')
users = commit_users.union([
commit_users.where(name: 'Alice').to_sql,
commit_users.where(name: 'Bob').to_sql
])
expect(users).to include(alice)
expect(users).to include(bob)
end
end
end
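# The .union example above suggests a helper that wraps the given SQL strings
# in a single UNION subquery; a minimal sketch (the actual method body is an
# assumption):
#
#   def self.union(queries)
#     from("(#{queries.join("\nUNION\n")}) AS #{table_name}")
#   end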
describe '#perform' do
it 'migrates the data in the range' do
commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
author_name: 'bob',
author_email: 'bob@example.com',
committer_name: 'bob',
committer_email: 'bob@example.com'
)
migration.perform(diff.id, diff.id + 1)
bob = commit_users.find_by(name: 'bob')
commit = commits.first
expect(commit.commit_author_id).to eq(bob.id)
expect(commit.committer_id).to eq(bob.id)
end
it 'treats empty names and emails the same as NULL values' do
commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
author_name: 'bob',
author_email: 'bob@example.com',
committer_name: '',
committer_email: ''
)
migration.perform(diff.id, diff.id + 1)
bob = commit_users.find_by(name: 'bob')
commit = commits.first
expect(commit.commit_author_id).to eq(bob.id)
expect(commit.committer_id).to be_nil
end
it 'does not update rows without a committer and author' do
commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc')
)
migration.perform(diff.id, diff.id + 1)
commit = commits.first
expect(commit_users.count).to eq(0)
expect(commit.commit_author_id).to be_nil
expect(commit.committer_id).to be_nil
end
it 'marks the background job as done' do
Gitlab::Database::BackgroundMigrationJob.create!(
class_name: 'MigrateMergeRequestDiffCommitUsers',
arguments: [diff.id, diff.id + 1]
)
migration.perform(diff.id, diff.id + 1)
job = Gitlab::Database::BackgroundMigrationJob.first
expect(job.status).to eq('succeeded')
end
end
describe '#get_data_to_update' do
it 'returns the users and commit rows to update' do
commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
author_name: 'bob' + ('a' * 510),
author_email: 'bob@example.com',
committer_name: 'bob' + ('a' * 510),
committer_email: 'bob@example.com'
)
commits.create!(
merge_request_diff_id: diff.id,
relative_order: 1,
sha: Gitlab::Database::ShaAttribute.serialize('456abc'),
author_name: 'alice',
author_email: 'alice@example.com',
committer_name: 'alice',
committer_email: 'alice@example.com'
)
users, to_update = migration.get_data_to_update(diff.id, diff.id + 1)
bob_name = 'bob' + ('a' * 509)
expect(users).to include(%w[alice alice@example.com])
expect(users).to include([bob_name, 'bob@example.com'])
expect(to_update[[diff.id, 0]])
.to eq([[bob_name, 'bob@example.com'], [bob_name, 'bob@example.com']])
expect(to_update[[diff.id, 1]])
.to eq([%w[alice alice@example.com], %w[alice alice@example.com]])
end
it 'does not include a user if both the name and email are missing' do
commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
author_name: nil,
author_email: nil,
committer_name: 'bob',
committer_email: 'bob@example.com'
)
users, _ = migration.get_data_to_update(diff.id, diff.id + 1)
expect(users).to eq([%w[bob bob@example.com]].to_set)
end
end
describe '#get_user_rows_in_batches' do
it 'retrieves all existing users' do
alice = commit_users.create!(name: 'alice', email: 'alice@example.com')
bob = commit_users.create!(name: 'bob', email: 'bob@example.com')
users = [[alice.name, alice.email], [bob.name, bob.email]]
mapping = {}
migration.get_user_rows_in_batches(users, mapping)
expect(mapping[%w[alice alice@example.com]]).to eq(alice)
expect(mapping[%w[bob bob@example.com]]).to eq(bob)
end
end
describe '#create_missing_users' do
it 'creates merge request diff commit users that are missing' do
alice = commit_users.create!(name: 'alice', email: 'alice@example.com')
users = [%w[alice alice@example.com], %w[bob bob@example.com]]
mapping = { %w[alice alice@example.com] => alice }
migration.create_missing_users(users, mapping)
expect(mapping[%w[alice alice@example.com]]).to eq(alice)
expect(mapping[%w[bob bob@example.com]].name).to eq('bob')
expect(mapping[%w[bob bob@example.com]].email).to eq('bob@example.com')
end
end
describe '#update_commit_rows' do
it 'updates the merge request diff commit rows' do
to_update = { [42, 0] => [%w[alice alice@example.com], []] }
user_mapping = { %w[alice alice@example.com] => double(:user, id: 1) }
expect(migration)
.to receive(:bulk_update_commit_rows)
.with({ [42, 0] => [1, nil] })
migration.update_commit_rows(to_update, user_mapping)
end
end
describe '#bulk_update_commit_rows' do
context 'when there are no authors and committers' do
it 'does not update any rows' do
migration.bulk_update_commit_rows({ [1, 0] => [] })
expect(described_class::MergeRequestDiffCommit.connection)
.not_to receive(:execute)
end
end
context 'when there are only authors' do
it 'only updates the author IDs' do
author = commit_users.create!(name: 'Alice', email: 'alice@example.com')
commit = commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc')
)
mapping = {
[commit.merge_request_diff_id, commit.relative_order] =>
[author.id, nil]
}
migration.bulk_update_commit_rows(mapping)
commit = commits.first
expect(commit.commit_author_id).to eq(author.id)
expect(commit.committer_id).to be_nil
end
end
context 'when there are only committers' do
it 'only updates the committer IDs' do
committer =
commit_users.create!(name: 'Alice', email: 'alice@example.com')
commit = commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc')
)
mapping = {
[commit.merge_request_diff_id, commit.relative_order] =>
[nil, committer.id]
}
migration.bulk_update_commit_rows(mapping)
commit = commits.first
expect(commit.committer_id).to eq(committer.id)
expect(commit.commit_author_id).to be_nil
end
end
context 'when there are both authors and committers' do
it 'updates both the author and committer IDs' do
author = commit_users.create!(name: 'Bob', email: 'bob@example.com')
committer =
commit_users.create!(name: 'Alice', email: 'alice@example.com')
commit = commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc')
)
mapping = {
[commit.merge_request_diff_id, commit.relative_order] =>
[author.id, committer.id]
}
migration.bulk_update_commit_rows(mapping)
commit = commits.first
expect(commit.commit_author_id).to eq(author.id)
expect(commit.committer_id).to eq(committer.id)
end
end
context 'when there are multiple commit rows to update' do
it 'updates all the rows' do
author = commit_users.create!(name: 'Bob', email: 'bob@example.com')
committer =
commit_users.create!(name: 'Alice', email: 'alice@example.com')
commit1 = commits.create!(
merge_request_diff_id: diff.id,
relative_order: 0,
sha: Gitlab::Database::ShaAttribute.serialize('123abc')
)
commit2 = commits.create!(
merge_request_diff_id: diff.id,
relative_order: 1,
sha: Gitlab::Database::ShaAttribute.serialize('456abc')
)
mapping = {
[commit1.merge_request_diff_id, commit1.relative_order] =>
[author.id, committer.id],
[commit2.merge_request_diff_id, commit2.relative_order] =>
[author.id, nil]
}
migration.bulk_update_commit_rows(mapping)
commit1 = commits.find_by(relative_order: 0)
commit2 = commits.find_by(relative_order: 1)
expect(commit1.commit_author_id).to eq(author.id)
expect(commit1.committer_id).to eq(committer.id)
expect(commit2.commit_author_id).to eq(author.id)
expect(commit2.committer_id).to be_nil
end
end
end
describe '#primary_key' do
it 'returns the primary key for the commits table' do
key = migration.primary_key
expect(key.to_sql).to eq('("merge_request_diff_commits"."merge_request_diff_id", "merge_request_diff_commits"."relative_order")')
end
end
describe '#prepare' do
it 'trims a value to at most 512 characters' do
expect(migration.prepare('€' * 1_000)).to eq('€' * 512)
end
it 'returns nil if the value is an empty string' do
expect(migration.prepare('')).to be_nil
end
end
end
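Read together, the #perform examples imply an orchestration roughly like the sketch below. The helper names are taken from the specs themselves; the body and the job-tracking helper are assumptions:
def perform(start_id, stop_id)
  # Distinct (name, email) tuples plus the commit rows they belong to.
  users, to_update = get_data_to_update(start_id, stop_id)

  # Resolve each tuple to an existing or newly created commit user row.
  mapping = {}
  get_user_rows_in_batches(users, mapping)
  create_missing_users(users, mapping)

  # Write the resolved author/committer IDs back to the commit rows.
  update_commit_rows(to_update, mapping)

  # Assumed helper: marks the tracking row for this range as succeeded.
  mark_job_as_succeeded(start_id, stop_id)
end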

View File

@ -198,6 +198,8 @@ merge_request_diff:
- merge_request_diff_files
merge_request_diff_commits:
- merge_request_diff
- commit_author
- committer
merge_request_diff_detail:
- merge_request_diff
merge_request_diff_files:

View File

@ -109,14 +109,14 @@ RSpec.describe 'Test coverage of the Project Import' do
def failure_message(not_tested_relations)
<<~MSG
These relations seem to be added recenty and
These relations seem to be added recently and
they are expected to be covered in our Import specs: #{not_tested_relations}.
To do that, expand one of the files listed in `project_json_fixtures`
(or expand the list if you consider adding a new fixture file).
After that, add a new spec into
`spec/lib/gitlab/import_export/project_tree_restorer_spec.rb`
`spec/lib/gitlab/import_export/project/tree_restorer_spec.rb`
to check that the relation is being imported correctly.
In case the spec breaks master or there is a sense of urgency,

View File

@ -150,4 +150,30 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
expect(merge_request.persisted?).to be true
end
end
context 'merge request diff commit users' do
it 'finds the existing user' do
user = MergeRequest::DiffCommitUser
.find_or_create('Alice', 'alice@example.com')
found = described_class.build(
MergeRequest::DiffCommitUser,
'name' => 'Alice',
'email' => 'alice@example.com'
)
expect(found).to eq(user)
end
it 'creates a new user' do
found = described_class.build(
MergeRequest::DiffCommitUser,
'name' => 'Alice',
'email' => 'alice@example.com'
)
expect(found.name).to eq('Alice')
expect(found.email).to eq('alice@example.com')
end
end
end

View File

@ -224,6 +224,27 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
expect(MergeRequestDiffCommit.count).to eq(77)
end
it 'assigns committer and author details to all diff commits' do
MergeRequestDiffCommit.all.each do |commit|
expect(commit.commit_author_id).not_to be_nil
expect(commit.committer_id).not_to be_nil
end
end
it 'assigns the correct commit users to different diff commits' do
commit1 = MergeRequestDiffCommit
.find_by(sha: '0b4bc9a49b562e85de7cc9e834518ea6828729b9')
commit2 = MergeRequestDiffCommit
.find_by(sha: 'a4e5dfebf42e34596526acb8611bc7ed80e4eb3f')
expect(commit1.commit_author.name).to eq('Dmitriy Zaporozhets')
expect(commit1.commit_author.email).to eq('dmitriy.zaporozhets@gmail.com')
expect(commit2.commit_author.name).to eq('James Lopez')
expect(commit2.commit_author.email).to eq('james@jameslopez.es')
end
it 'has the correct data for merge request latest_merge_request_diff' do
MergeRequest.find_each do |merge_request|
expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id))

View File

@ -235,6 +235,10 @@ MergeRequestDiffCommit:
- committer_email
- message
- trailers
MergeRequest::DiffCommitUser:
- id
- name
- email
MergeRequestDiffFile:
- merge_request_diff_id
- relative_order

View File

@ -0,0 +1,60 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration! 'schedule_merge_request_diff_users_background_migration'
RSpec.describe ScheduleMergeRequestDiffUsersBackgroundMigration, :migration do
let(:migration) { described_class.new }
describe '#up' do
before do
allow(described_class::MergeRequestDiff)
.to receive(:minimum)
.with(:id)
.and_return(42)
allow(described_class::MergeRequestDiff)
.to receive(:maximum)
.with(:id)
.and_return(85_123)
end
it 'schedules the migrations in batches' do
expect(migration)
.to receive(:migrate_in)
.ordered
.with(2.minutes.to_i, described_class::MIGRATION_NAME, [42, 40_042])
expect(migration)
.to receive(:migrate_in)
.ordered
.with(4.minutes.to_i, described_class::MIGRATION_NAME, [40_042, 80_042])
expect(migration)
.to receive(:migrate_in)
.ordered
.with(6.minutes.to_i, described_class::MIGRATION_NAME, [80_042, 120_042])
migration.up
end
it 'creates rows to track the background migration jobs' do
expect(Gitlab::Database::BackgroundMigrationJob)
.to receive(:create!)
.ordered
.with(class_name: described_class::MIGRATION_NAME, arguments: [42, 40_042])
expect(Gitlab::Database::BackgroundMigrationJob)
.to receive(:create!)
.ordered
.with(class_name: described_class::MIGRATION_NAME, arguments: [40_042, 80_042])
expect(Gitlab::Database::BackgroundMigrationJob)
.to receive(:create!)
.ordered
.with(class_name: described_class::MIGRATION_NAME, arguments: [80_042, 120_042])
migration.up
end
end
end
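The asserted ranges and delays pin down the batching arithmetic: 40,000 IDs per job, one job every two minutes, and a tracking row per job. A sketch of #up consistent with that (the batch size and interval constants are assumptions):
def up
  start_id = MergeRequestDiff.minimum(:id).to_i
  max_id = MergeRequestDiff.maximum(:id).to_i
  batch_size = 40_000
  interval = 2.minutes.to_i
  batch = 1

  while start_id <= max_id
    stop_id = start_id + batch_size

    migrate_in(batch * interval, MIGRATION_NAME, [start_id, stop_id])

    Gitlab::Database::BackgroundMigrationJob.create!(
      class_name: MIGRATION_NAME,
      arguments: [start_id, stop_id]
    )

    start_id = stop_id
    batch += 1
  end
end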

View File

@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
RSpec.describe ScheduleLatestPipelineIdPopulation do
RSpec.describe ReScheduleLatestPipelineIdPopulation do
let(:namespaces) { table(:namespaces) }
let(:pipelines) { table(:ci_pipelines) }
let(:projects) { table(:projects) }

View File

@ -13,7 +13,15 @@ RSpec.describe Compare do
let(:raw_compare) { Gitlab::Git::Compare.new(project.repository.raw_repository, start_commit.id, head_commit.id) }
subject { described_class.new(raw_compare, project) }
subject(:compare) { described_class.new(raw_compare, project) }
describe '#cache_key' do
subject { compare.cache_key }
it { is_expected.to include(project) }
it { is_expected.to include(:compare) }
it { is_expected.to include(compare.diff_refs.hash) }
end
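# The three expectations above pin down the cache key's contents; a minimal
# implementation that satisfies them (the real method body is an assumption):
#
#   def cache_key
#     [project, :compare, diff_refs.hash]
#   end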
describe '#start_commit' do
it 'returns raw compare base commit' do

View File

@ -6,8 +6,45 @@ RSpec.describe LfsDownloadObject do
let(:oid) { 'cd293be6cea034bd45a0352775a219ef5dc7825ce55d1f7dae9762d80ce64411' }
let(:link) { 'http://www.example.com' }
let(:size) { 1 }
let(:headers) { { test: "asdf" } }
subject { described_class.new(oid: oid, size: size, link: link) }
subject { described_class.new(oid: oid, size: size, link: link, headers: headers) }
describe '#headers' do
it 'returns specified Hash' do
expect(subject.headers).to eq(headers)
end
context 'with nil headers' do
let(:headers) { nil }
it 'returns a Hash' do
expect(subject.headers).to eq({})
end
end
end
describe '#has_authorization_header?' do
it 'returns false' do
expect(subject.has_authorization_header?).to be false
end
context 'with uppercase form' do
let(:headers) { { 'Authorization' => 'Basic 12345' } }
it 'returns true' do
expect(subject.has_authorization_header?).to be true
end
end
context 'with lowercase form' do
let(:headers) { { 'authorization' => 'Basic 12345' } }
it 'returns true' do
expect(subject.has_authorization_header?).to be true
end
end
end
describe 'validations' do
it { is_expected.to validate_numericality_of(:size).is_greater_than_or_equal_to(0) }
@ -66,5 +103,16 @@ RSpec.describe LfsDownloadObject do
end
end
end
context 'headers attribute' do
it 'only nil and Hash values are valid' do
aggregate_failures do
expect(described_class.new(oid: oid, size: size, link: 'http://www.example.com', headers: nil)).to be_valid
expect(described_class.new(oid: oid, size: size, link: 'http://www.example.com', headers: {})).to be_valid
expect(described_class.new(oid: oid, size: size, link: 'http://www.example.com', headers: { 'test' => 123 })).to be_valid
expect(described_class.new(oid: oid, size: size, link: 'http://www.example.com', headers: 'test')).to be_invalid
end
end
end
end
end
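From these examples, the model's header handling can be sketched as follows; this is a reading of the asserted behaviour rather than the verbatim implementation:
def headers
  # Fall back to an empty Hash when no headers were supplied.
  @headers || {}
end

def has_authorization_header?
  # Match the Authorization key case-insensitively, covering both forms.
  headers.keys.map(&:downcase).include?('authorization')
end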

View File

@ -0,0 +1,127 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe MergeRequest::DiffCommitUser do
describe 'validations' do
it 'requires that names are less than 512 characters long' do
expect(described_class.new(name: 'a' * 1000)).not_to be_valid
end
it 'requires that emails are less than 512 characters long' do
expect(described_class.new(email: 'a' * 1000)).not_to be_valid
end
it 'requires either a name or email' do
expect(described_class.new).not_to be_valid
end
it 'allows setting of just a name' do
expect(described_class.new(name: 'Alice')).to be_valid
end
it 'allows setting of just an email' do
expect(described_class.new(email: 'alice@example.com')).to be_valid
end
it 'allows setting of both a name and email' do
expect(described_class.new(name: 'Alice', email: 'alice@example.com'))
.to be_valid
end
end
describe '.prepare' do
it 'trims a value to at most 512 characters' do
expect(described_class.prepare('€' * 1_000)).to eq('€' * 512)
end
it 'returns nil if the value is an empty string' do
expect(described_class.prepare('')).to be_nil
end
end
describe '.find_or_create' do
it 'creates a new row if none exist' do
alice = described_class.find_or_create('Alice', 'alice@example.com')
expect(alice.name).to eq('Alice')
expect(alice.email).to eq('alice@example.com')
end
it 'returns an existing row if one exists' do
user1 = create(:merge_request_diff_commit_user)
user2 = described_class.find_or_create(user1.name, user1.email)
expect(user1).to eq(user2)
end
it 'handles concurrent inserts' do
user = create(:merge_request_diff_commit_user)
expect(described_class)
.to receive(:find_or_create_by!)
.ordered
.with(name: user.name, email: user.email)
.and_raise(ActiveRecord::RecordNotUnique)
expect(described_class)
.to receive(:find_or_create_by!)
.ordered
.with(name: user.name, email: user.email)
.and_return(user)
expect(described_class.find_or_create(user.name, user.email)).to eq(user)
end
end
describe '.bulk_find_or_create' do
it 'bulk creates missing rows and reuses existing rows' do
bob = create(
:merge_request_diff_commit_user,
name: 'Bob',
email: 'bob@example.com'
)
users = described_class.bulk_find_or_create(
[%w[Alice alice@example.com], %w[Bob bob@example.com]]
)
alice = described_class.find_by(name: 'Alice')
expect(users[%w[Alice alice@example.com]]).to eq(alice)
expect(users[%w[Bob bob@example.com]]).to eq(bob)
end
it 'does not insert any data when all users exist' do
bob = create(
:merge_request_diff_commit_user,
name: 'Bob',
email: 'bob@example.com'
)
users = described_class.bulk_find_or_create([%w[Bob bob@example.com]])
expect(described_class).not_to receive(:insert_all)
expect(users[%w[Bob bob@example.com]]).to eq(bob)
end
it 'handles concurrently inserted rows' do
bob = create(
:merge_request_diff_commit_user,
name: 'Bob',
email: 'bob@example.com'
)
input = [%w[Bob bob@example.com]]
expect(described_class)
.to receive(:bulk_find)
.twice
.with(input)
.and_return([], [bob])
users = described_class.bulk_find_or_create(input)
expect(users[%w[Bob bob@example.com]]).to eq(bob)
end
end
end
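The concurrency example above encodes a common ActiveRecord race-recovery idiom, and .prepare pins down the column-limit handling; a sketch consistent with both (the exact bodies are assumptions):
def self.find_or_create(name, email)
  find_or_create_by!(name: name, email: email)
rescue ActiveRecord::RecordNotUnique
  # Another process inserted the same (name, email) pair first;
  # retrying the lookup now finds the existing row.
  retry
end

def self.prepare(value)
  # Trim to the 512-character column limit and normalise '' to nil.
  value.present? ? value[0..511] : nil
end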

View File

@ -16,6 +16,11 @@ RSpec.describe MergeRequestDiffCommit do
let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
end
describe 'associations' do
it { is_expected.to belong_to(:commit_author) }
it { is_expected.to belong_to(:committer) }
end
describe '#to_hash' do
subject { merge_request.commits.first }
@ -46,6 +51,8 @@ RSpec.describe MergeRequestDiffCommit do
"committed_date": "2014-02-27T10:01:38.000+01:00".to_time,
"committer_name": "Dmitriy Zaporozhets",
"committer_email": "dmitriy.zaporozhets@gmail.com",
"commit_author_id": an_instance_of(Integer),
"committer_id": an_instance_of(Integer),
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 0,
"sha": Gitlab::Database::ShaAttribute.serialize("5937ac0a7beb003549fc5fd26fc247adbce4a52e"),
@ -59,6 +66,8 @@ RSpec.describe MergeRequestDiffCommit do
"committed_date": "2014-02-27T09:57:31.000+01:00".to_time,
"committer_name": "Dmitriy Zaporozhets",
"committer_email": "dmitriy.zaporozhets@gmail.com",
"commit_author_id": an_instance_of(Integer),
"committer_id": an_instance_of(Integer),
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 1,
"sha": Gitlab::Database::ShaAttribute.serialize("570e7b2abdd848b95f2f578043fc23bd6f6fd24d"),
@ -76,6 +85,21 @@ RSpec.describe MergeRequestDiffCommit do
subject
end
it 'creates diff commit users' do
diff = create(:merge_request_diff, merge_request: merge_request)
described_class.create_bulk(diff.id, [commits.first])
commit_row = MergeRequestDiffCommit
.find_by(merge_request_diff_id: diff.id, relative_order: 0)
commit_user_row =
MergeRequest::DiffCommitUser.find_by(name: 'Dmitriy Zaporozhets')
expect(commit_row.commit_author).to eq(commit_user_row)
expect(commit_row.committer).to eq(commit_user_row)
end
context 'with dates larger than the DB limit' do
let(:commits) do
# This commit's date is "Sun Aug 17 07:12:55 292278994 +0000"
@ -92,6 +116,8 @@ RSpec.describe MergeRequestDiffCommit do
"committed_date": timestamp,
"committer_name": "Alejandro Rodríguez",
"committer_email": "alejorro70@gmail.com",
"commit_author_id": an_instance_of(Integer),
"committer_id": an_instance_of(Integer),
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 0,
"sha": Gitlab::Database::ShaAttribute.serialize("ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69"),
@ -107,4 +133,28 @@ RSpec.describe MergeRequestDiffCommit do
end
end
end
describe '.prepare_commits_for_bulk_insert' do
it 'returns the commit hashes and unique user tuples' do
commit = double(:commit, to_hash: {
parent_ids: %w[foo bar],
author_name: 'a' * 1000,
author_email: 'a' * 1000,
committer_name: 'Alice',
committer_email: 'alice@example.com'
})
hashes, tuples = described_class.prepare_commits_for_bulk_insert([commit])
expect(hashes).to eq([{
author_name: 'a' * 512,
author_email: 'a' * 512,
committer_name: 'Alice',
committer_email: 'alice@example.com'
}])
expect(tuples)
.to include(['a' * 512, 'a' * 512], %w[Alice alice@example.com])
end
end
end
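The last example suggests a helper that normalises the commit hashes and collects the unique (name, email) tuples in a single pass; a sketch under that reading (the exact body is an assumption):
def self.prepare_commits_for_bulk_insert(commits)
  user_tuples = Set.new

  hashes = commits.map do |commit|
    hash = commit.to_hash.except(:parent_ids)

    # Trim names and emails the same way MergeRequest::DiffCommitUser does.
    %i[author_name author_email committer_name committer_email].each do |key|
      hash[key] = MergeRequest::DiffCommitUser.prepare(hash[key])
    end

    user_tuples << [hash[:author_name], hash[:author_email]]
    user_tuples << [hash[:committer_name], hash[:committer_email]]

    hash
  end

  [hashes, user_tuples]
end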

View File

@ -488,6 +488,17 @@ RSpec.describe API::Repositories do
let(:current_user) { nil }
end
end
context 'api_caching_repository_compare is disabled' do
before do
stub_feature_flags(api_caching_repository_compare: false)
end
it_behaves_like 'repository compare' do
let(:project) { create(:project, :public, :repository) }
let(:current_user) { nil }
end
end
end
describe 'GET /projects/:id/repository/contributors' do

View File

@ -11,4 +11,12 @@ RSpec.describe Analytics::CycleAnalytics::StageEntity do
expect(entity_json).to have_key(:start_event_html_description)
expect(entity_json).to have_key(:end_event_html_description)
end
it 'exposes start_event and end_event objects' do
expect(entity_json[:start_event][:identifier]).to eq(entity_json[:start_event_identifier])
expect(entity_json[:end_event][:identifier]).to eq(entity_json[:end_event_identifier])
expect(entity_json[:start_event][:html_description]).to eq(entity_json[:start_event_html_description])
expect(entity_json[:end_event][:html_description]).to eq(entity_json[:end_event_html_description])
end
end

View File

@ -6,6 +6,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
let(:lfs_endpoint) { "#{import_url}/info/lfs/objects/batch" }
let!(:project) { create(:project, import_url: import_url) }
let(:new_oids) { { 'oid1' => 123, 'oid2' => 125 } }
let(:headers) { { 'X-Some-Header' => '456' } }
let(:remote_uri) { URI.parse(lfs_endpoint) }
let(:request_object) { HTTParty::Request.new(Net::HTTP::Post, '/') }
@ -18,7 +19,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
{
'oid' => oid, 'size' => size,
'actions' => {
'download' => { 'href' => "#{import_url}/gitlab-lfs/objects/#{oid}" }
'download' => { 'href' => "#{import_url}/gitlab-lfs/objects/#{oid}", header: headers }
}
}
end
@ -48,12 +49,20 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
end
describe '#execute' do
let(:download_objects) { subject.execute(new_oids) }
it 'retrieves the download link of every non-existent LFS object' do
subject.execute(new_oids).each do |lfs_download_object|
download_objects.each do |lfs_download_object|
expect(lfs_download_object.link).to eq "#{import_url}/gitlab-lfs/objects/#{lfs_download_object.oid}"
end
end
it 'stores headers' do
download_objects.each do |lfs_download_object|
expect(lfs_download_object.headers).to eq(headers)
end
end
context 'when lfs objects size is larger than the batch size' do
def stub_successful_request(batch)
response = custom_response(success_net_response, objects_response(batch))

View File

@ -155,13 +155,24 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
context 'when credentials present' do
let(:download_link_with_credentials) { "http://user:password@gitlab.com/#{oid}" }
let(:lfs_object) { LfsDownloadObject.new(oid: oid, size: size, link: download_link_with_credentials) }
before do
stub_full_request(download_link).with(headers: { 'Authorization' => 'Basic dXNlcjpwYXNzd29yZA==' }).to_return(body: lfs_content)
end
let!(:request_stub) { stub_full_request(download_link).with(headers: { 'Authorization' => 'Basic dXNlcjpwYXNzd29yZA==' }).to_return(body: lfs_content) }
it 'adds authorization headers to the request' do
subject
subject.execute
expect(request_stub).to have_been_requested
end
context 'when Authorization header is present' do
let(:auth_header) { { 'Authorization' => 'Basic 12345' } }
let(:lfs_object) { LfsDownloadObject.new(oid: oid, size: size, link: download_link_with_credentials, headers: auth_header) }
let!(:request_stub) { stub_full_request(download_link).with(headers: auth_header).to_return(body: lfs_content) }
it 'uses the provided authorization header' do
subject.execute
expect(request_stub).to have_been_requested
end
end
end