Add latest changes from gitlab-org/gitlab@master
parent b64a8161c9
commit d8b32df644
@@ -1 +1 @@
-2.6.5
+2.6.6
@@ -10,7 +10,7 @@ import {
   GlDropdownItem,
   GlTabs,
   GlTab,
-  GlDeprecatedBadge as GlBadge,
+  GlBadge,
 } from '@gitlab/ui';
 import createFlash from '~/flash';
 import { s__ } from '~/locale';
@@ -77,6 +77,11 @@ export default {
       tdClass: `${tdClass} text-md-right`,
       sortable: true,
     },
+    {
+      key: 'assignees',
+      label: s__('AlertManagement|Assignees'),
+      tdClass,
+    },
     {
       key: 'status',
       thClass: 'w-15p',
@@ -237,6 +242,10 @@ export default {
      const { category, action, label } = trackAlertStatusUpdateOptions;
      Tracking.event(category, action, { label, property: status });
    },
+    getAssignees(assignees) {
+      // TODO: Update to show list of assignee(s) after https://gitlab.com/gitlab-org/gitlab/-/issues/218405
+      return assignees?.length > 0 ? assignees[0]?.username : s__('AlertManagement|Unassigned');
+    },
  },
};
</script>
@@ -308,6 +317,12 @@ export default {
        <div class="gl-max-w-full text-truncate">{{ item.title }}</div>
      </template>
 
+      <template #cell(assignees)="{ item }">
+        <div class="gl-max-w-full text-truncate" data-testid="assigneesField">
+          {{ getAssignees(item.assignees) }}
+        </div>
+      </template>
+
      <template #cell(status)="{ item }">
        <gl-dropdown :text="$options.statuses[item.status]" class="w-100" right>
          <gl-dropdown-item
@@ -6,5 +6,8 @@ fragment AlertListItem on AlertManagementAlert {
   startedAt
   endedAt
   eventCount
-  issueIid
+  issueIid,
+  assignees {
+    username
+  },
 }
@@ -15,6 +15,7 @@ import GraphGroup from './graph_group.vue';
 import EmptyState from './empty_state.vue';
 import GroupEmptyState from './group_empty_state.vue';
 import VariablesSection from './variables_section.vue';
+import LinksSection from './links_section.vue';
 
 import TrackEventDirective from '~/vue_shared/directives/track_event';
 import {
@@ -38,6 +39,7 @@ export default {
    EmptyState,
    GroupEmptyState,
    VariablesSection,
+    LinksSection,
  },
  directives: {
    GlModal: GlModalDirective,
@@ -154,12 +156,16 @@ export default {
      'showEmptyState',
      'expandedPanel',
      'variables',
+      'links',
      'currentDashboard',
    ]),
    ...mapGetters('monitoringDashboard', ['selectedDashboard', 'getMetricStates']),
    shouldShowVariablesSection() {
      return Object.keys(this.variables).length > 0;
    },
+    shouldShowLinksSection() {
+      return Object.keys(this.links).length > 0;
+    },
  },
  watch: {
    dashboard(newDashboard) {
@@ -309,6 +315,7 @@ export default {
      @setRearrangingPanels="onSetRearrangingPanels"
    />
    <variables-section v-if="shouldShowVariablesSection && !showEmptyState" />
+    <links-section v-if="shouldShowLinksSection && !showEmptyState" />
    <div v-if="!showEmptyState">
      <dashboard-panel
        v-show="expandedPanel.panel"
@@ -0,0 +1,36 @@
+<script>
+/**
+ * This component generates user-defined links in the
+ * dashboard yml file. However, this component will be
+ * used in the metrics dashboard after
+ * https://gitlab.com/gitlab-org/gitlab/-/merge_requests/32895
+ */
+import { mapState } from 'vuex';
+import { GlIcon, GlLink } from '@gitlab/ui';
+
+export default {
+  components: {
+    GlIcon,
+    GlLink,
+  },
+  computed: {
+    ...mapState('monitoringDashboard', ['links']),
+  },
+};
+</script>
+<template>
+  <div
+    ref="linksSection"
+    class="d-sm-flex flex-sm-wrap gl-mt-3 gl-p-2 bg-gray-light border border-radius-default"
+  >
+    <div
+      v-for="(link, key) in links"
+      :key="key"
+      class="gl-mb-1 gl-pr-3 d-flex d-sm-block text-break-word"
+    >
+      <gl-link :href="link.url" class="text-plain text-decoration-none"
+        ><gl-icon name="link" class="align-text-bottom mr-1" />{{ link.title }}
+      </gl-link>
+    </div>
+  </div>
+</template>
@@ -36,10 +36,14 @@ export default () => ({
  allDashboards: [],
  /**
   * User-defined custom variables are passed
-   * via the dashboard.yml file.
+   * via the dashboard yml file.
   */
  variables: {},
-
+  /**
+   * User-defined custom links are passed
+   * via the dashboard yml file.
+   */
+  links: {},
  // Other project data
  annotations: [],
  deploymentData: [],
@@ -8,14 +8,21 @@ class Projects::BadgesController < Projects::ApplicationController
 
   def pipeline
     pipeline_status = Gitlab::Badge::Pipeline::Status
-      .new(project, params[:ref])
+      .new(project, params[:ref], opts: {
+        key_text: params[:key_text],
+        key_width: params[:key_width]
+      })
 
     render_badge pipeline_status
   end
 
   def coverage
     coverage_report = Gitlab::Badge::Coverage::Report
-      .new(project, params[:ref], params[:job])
+      .new(project, params[:ref], opts: {
+        job: params[:job],
+        key_text: params[:key_text],
+        key_width: params[:key_width]
+      })
 
     render_badge coverage_report
   end
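As an illustration of the new `opts` plumbing, a minimal console-style sketch; the `project` and ref are placeholders, and the values arrive as request params (so `key_width` is a string here):

```ruby
# A minimal sketch, assuming a project with a pipeline on `master`.
badge = Gitlab::Badge::Pipeline::Status.new(
  project,
  'master',
  opts: { key_text: 'Frontend Coverage', key_width: '100' }
)

badge.customization # => { key_width: 100, key_text: "Frontend Coverage" }
```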
@@ -83,6 +83,11 @@ module Types
          Types::TimeType,
          null: true,
          description: 'Timestamp the alert was last updated'
+
+    field :assignees,
+          [Types::UserType],
+          null: true,
+          description: 'Assignees of the alert'
    end
  end
end
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+module AlertManagement
+  def self.table_name_prefix
+    'alert_management_'
+  end
+end
@@ -1,5 +1,7 @@
 # frozen_string_literal: true
 
+require_dependency 'alert_management'
+
 module AlertManagement
   class Alert < ApplicationRecord
     include AtomicInternalId
@@ -23,9 +25,11 @@ module AlertManagement
 
     belongs_to :project
     belongs_to :issue, optional: true
-    has_internal_id :iid, scope: :project, init: ->(s) { s.project.alert_management_alerts.maximum(:iid) }
 
     self.table_name = 'alert_management_alerts'
+    has_many :alert_assignees, inverse_of: :alert
+    has_many :assignees, through: :alert_assignees
 
+    has_internal_id :iid, scope: :project, init: ->(s) { s.project.alert_management_alerts.maximum(:iid) }
 
     sha_attribute :fingerprint
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module AlertManagement
+  class AlertAssignee < ApplicationRecord
+    belongs_to :alert, inverse_of: :alert_assignees
+    belongs_to :assignee, class_name: 'User', foreign_key: :user_id
+
+    validates :alert, presence: true
+    validates :assignee, presence: true, uniqueness: { scope: :alert_id }
+  end
+end
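A quick console sketch of the new associations; the alert and user records are hypothetical:

```ruby
# A minimal sketch, assuming an existing alert and user.
alert = AlertManagement::Alert.last
alert.assignees << User.find_by(username: 'root')

alert.assignees.pluck(:username)     # => ["root"]
AlertManagement::AlertAssignee.count # => 1
```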
@@ -39,6 +39,7 @@ module Ci
    has_one :resource, class_name: 'Ci::Resource', inverse_of: :build
    has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
    has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id
+    has_many :report_results, class_name: 'Ci::BuildReportResult', inverse_of: :build
 
    has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
    has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Ci
+  class BuildReportResult < ApplicationRecord
+    extend Gitlab::Ci::Model
+
+    self.primary_key = :build_id
+
+    belongs_to :build, class_name: "Ci::Build", inverse_of: :report_results
+    belongs_to :project, class_name: "Project", inverse_of: :build_report_results
+
+    validates :build, :project, presence: true
+    validates :data, json_schema: { filename: "build_report_result_data" }
+  end
+end
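A minimal sketch of the new model in use; the `build` is a placeholder, and the `data` payload is meant to follow the `build_report_result_data` JSON schema added further down in this commit:

```ruby
# Assuming an existing Ci::Build.
result = Ci::BuildReportResult.new(
  build: build,
  project: build.project,
  data: { junit: { name: 'rspec', success: 398, failed: 2 } }
)

result.valid? # => true only when `data` conforms to the schema
```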
@@ -4,7 +4,7 @@ module PerformanceMonitoring
  class PrometheusDashboard
    include ActiveModel::Model
 
-    attr_accessor :dashboard, :panel_groups, :path, :environment, :priority, :templating
+    attr_accessor :dashboard, :panel_groups, :path, :environment, :priority, :templating, :links
 
    validates :dashboard, presence: true
    validates :panel_groups, presence: true
@@ -291,6 +291,7 @@ class Project < ApplicationRecord
   has_many :builds, class_name: 'Ci::Build', inverse_of: :project, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
   has_many :build_trace_section_names, class_name: 'Ci::BuildTraceSectionName'
   has_many :build_trace_chunks, class_name: 'Ci::BuildTraceChunk', through: :builds, source: :trace_chunks
+  has_many :build_report_results, class_name: 'Ci::BuildReportResult', inverse_of: :project
   has_many :job_artifacts, class_name: 'Ci::JobArtifact'
   has_many :runner_projects, class_name: 'Ci::RunnerProject', inverse_of: :project
   has_many :runners, through: :runner_projects, source: :runner, class_name: 'Ci::Runner'
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module JiraImport
+  class UsersImporter
+    attr_reader :user, :project, :start_at, :result
+
+    MAX_USERS = 50
+
+    def initialize(user, project, start_at)
+      @project = project
+      @start_at = start_at
+      @user = user
+    end
+
+    def execute
+      project.validate_jira_import_settings!(user: user)
+
+      return ServiceResponse.success(payload: nil) if users.blank?
+
+      result = UsersMapper.new(project, users).execute
+      ServiceResponse.success(payload: result)
+    rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED, URI::InvalidURIError, JIRA::HTTPError, OpenSSL::SSL::SSLError => error
+      Gitlab::ErrorTracking.track_exception(error, project_id: project.id, request: url)
+      ServiceResponse.error(message: "There was an error when communicating to Jira: #{error.message}")
+    end
+
+    private
+
+    def users
+      @users ||= client.get(url)
+    end
+
+    def url
+      "/rest/api/2/users?maxResults=#{MAX_USERS}&startAt=#{start_at.to_i}"
+    end
+
+    def client
+      @client ||= project.jira_service.client
+    end
+  end
+end
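A minimal usage sketch; `current_user` and `project` are placeholders, and the project must have a configured Jira service for `project.jira_service.client` to work:

```ruby
response = JiraImport::UsersImporter.new(current_user, project, 0).execute

response.success? # => true
response.payload  # => mapped users from the first page (MAX_USERS = 50), or nil
```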
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module JiraImport
+  class UsersMapper
+    attr_reader :project, :jira_users
+
+    def initialize(project, jira_users)
+      @project = project
+      @jira_users = jira_users
+    end
+
+    def execute
+      jira_users.to_a.map do |jira_user|
+        {
+          jira_account_id: jira_user['accountId'],
+          jira_display_name: jira_user['displayName'],
+          jira_email: jira_user['emailAddress'],
+          gitlab_id: match_user(jira_user)
+        }
+      end
+    end
+
+    private
+
+    # TODO: Matching user by email and displayName will be done as the part
+    # of follow-up issue: https://gitlab.com/gitlab-org/gitlab/-/issues/219023
+    def match_user(jira_user)
+      nil
+    end
+  end
+end
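A sketch of the mapper's output with an illustrative Jira user payload (`gitlab_id` is always `nil` until the follow-up issue lands):

```ruby
jira_users = [
  { 'accountId' => '5e32f5', 'displayName' => 'Jane Doe', 'emailAddress' => 'jane@example.com' }
]

JiraImport::UsersMapper.new(project, jira_users).execute
# => [{ jira_account_id: "5e32f5", jira_display_name: "Jane Doe",
#       jira_email: "jane@example.com", gitlab_id: nil }]
```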
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require "json-schema"
+
+# JsonSchemaValidator
+#
+# Custom validator for json schema.
+# Create a json schema within the json_schemas directory
+#
+#   class Project < ActiveRecord::Base
+#     validates :data, json_schema: { filename: "file" }
+#   end
+#
+class JsonSchemaValidator < ActiveModel::EachValidator
+  def initialize(options)
+    raise ArgumentError, "Expected 'filename' as an argument" unless options[:filename]
+
+    super(options)
+  end
+
+  def validate_each(record, attribute, value)
+    unless valid_schema?(value)
+      record.errors.add(attribute, "must be a valid json schema")
+    end
+  end
+
+  private
+
+  def valid_schema?(value)
+    JSON::Validator.validate(schema_path, value)
+  end
+
+  def schema_path
+    Rails.root.join('app', 'validators', 'json_schemas', "#{options[:filename]}.json").to_s
+  end
+end
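A sketch of the validator rejecting bad data; `SomeModel` is hypothetical, and the schema filename must exist under `app/validators/json_schemas/`:

```ruby
class SomeModel < ActiveRecord::Base
  validates :data, json_schema: { filename: 'build_report_result_data' }
end

record = SomeModel.new(data: { 'unexpected' => true })
record.valid?        # => false ("additionalProperties": false rejects unknown keys)
record.errors[:data] # => ["must be a valid json schema"]
```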
@@ -0,0 +1,12 @@
+{
+  "description": "Build report result data",
+  "type": "object",
+  "properties": {
+    "coverage": { "type": "float" },
+    "junit": {
+      "type": "object",
+      "items": { "$ref": "./build_report_result_data_junit.json" }
+    }
+  },
+  "additionalProperties": false
+}
@@ -0,0 +1,13 @@
+{
+  "description": "Build report result data junit",
+  "type": "object",
+  "properties": {
+    "name": { "type": "string" },
+    "duration": { "type": "string" },
+    "failed": { "type": "integer" },
+    "errored": { "type": "integer" },
+    "skipped": { "type": "integer" },
+    "success": { "type": "integer" }
+  },
+  "additionalProperties": false
+}
@@ -0,0 +1,5 @@
+---
+title: Allow customization of badge key_text and key_width
+merge_request: 29381
+author: Fabian Schneider @fabsrc
+type: added
@@ -0,0 +1,5 @@
+---
+title: Add database and GraphQL support for alert assignees
+merge_request: 32609
+author:
+type: added
@@ -0,0 +1,5 @@
+---
+title: Add project specific repository storage API
+merge_request: 32493
+author:
+type: added
@@ -0,0 +1,5 @@
+---
+title: Add build report results data model
+merge_request: 32991
+author:
+type: performance
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class CreateAlertManagementAlertAssignees < ActiveRecord::Migration[6.0]
+  DOWNTIME = false
+
+  ALERT_INDEX_NAME = 'index_alert_assignees_on_alert_id'
+  UNIQUE_INDEX_NAME = 'index_alert_assignees_on_user_id_and_alert_id'
+
+  def up
+    create_table :alert_management_alert_assignees do |t|
+      t.bigint :user_id, null: false
+      t.bigint :alert_id, null: false
+
+      t.index :alert_id, name: ALERT_INDEX_NAME
+      t.index [:user_id, :alert_id], unique: true, name: UNIQUE_INDEX_NAME
+    end
+  end
+
+  def down
+    drop_table :alert_management_alert_assignees
+  end
+end
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddForeignKeyToUserIdOnAlertManagementAlertAssignees < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  def up
+    with_lock_retries do
+      add_foreign_key :alert_management_alert_assignees, :users, column: :user_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
+    end
+  end
+
+  def down
+    with_lock_retries do
+      remove_foreign_key :alert_management_alert_assignees, column: :user_id
+    end
+  end
+end
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddForeignKeyToAlertIdOnAlertMangagementAlertAssignees < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  def up
+    with_lock_retries do
+      add_foreign_key :alert_management_alert_assignees, :alert_management_alerts, column: :alert_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
+    end
+  end
+
+  def down
+    with_lock_retries do
+      remove_foreign_key :alert_management_alert_assignees, column: :alert_id
+    end
+  end
+end
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class CreateCiBuildReportResultsTable < ActiveRecord::Migration[6.0]
+  DOWNTIME = false
+
+  def change
+    create_table :ci_build_report_results, id: false do |t|
+      t.bigint :build_id, null: false, index: false, primary_key: true
+      t.bigint :project_id, null: false, index: true
+      t.jsonb :data, null: false, default: {}
+    end
+  end
+end
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddForeignKeyToBuildIdOnBuildReportResults < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  def up
+    with_lock_retries do
+      add_foreign_key :ci_build_report_results, :ci_builds, column: :build_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
+    end
+  end
+
+  def down
+    with_lock_retries do
+      remove_foreign_key :ci_build_report_results, column: :build_id
+    end
+  end
+end
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddForeignKeyToProjectIdOnBuildReportResults < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  def up
+    with_lock_retries do
+      add_foreign_key :ci_build_report_results, :projects, column: :project_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
+    end
+  end
+
+  def down
+    with_lock_retries do
+      remove_foreign_key :ci_build_report_results, column: :project_id
+    end
+  end
+end
@@ -24,6 +24,21 @@ CREATE SEQUENCE public.abuse_reports_id_seq
 
 ALTER SEQUENCE public.abuse_reports_id_seq OWNED BY public.abuse_reports.id;
 
+CREATE TABLE public.alert_management_alert_assignees (
+    id bigint NOT NULL,
+    user_id bigint NOT NULL,
+    alert_id bigint NOT NULL
+);
+
+CREATE SEQUENCE public.alert_management_alert_assignees_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+ALTER SEQUENCE public.alert_management_alert_assignees_id_seq OWNED BY public.alert_management_alert_assignees.id;
+
 CREATE TABLE public.alert_management_alerts (
     id bigint NOT NULL,
     created_at timestamp with time zone NOT NULL,
@@ -891,6 +906,21 @@ CREATE SEQUENCE public.ci_build_needs_id_seq
 
 ALTER SEQUENCE public.ci_build_needs_id_seq OWNED BY public.ci_build_needs.id;
 
+CREATE TABLE public.ci_build_report_results (
+    build_id bigint NOT NULL,
+    project_id bigint NOT NULL,
+    data jsonb DEFAULT '{}'::jsonb NOT NULL
+);
+
+CREATE SEQUENCE public.ci_build_report_results_build_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+ALTER SEQUENCE public.ci_build_report_results_build_id_seq OWNED BY public.ci_build_report_results.build_id;
+
 CREATE TABLE public.ci_build_trace_chunks (
     id bigint NOT NULL,
     build_id integer NOT NULL,
@@ -7323,6 +7353,8 @@ ALTER SEQUENCE public.zoom_meetings_id_seq OWNED BY public.zoom_meetings.id;
 
 ALTER TABLE ONLY public.abuse_reports ALTER COLUMN id SET DEFAULT nextval('public.abuse_reports_id_seq'::regclass);
 
+ALTER TABLE ONLY public.alert_management_alert_assignees ALTER COLUMN id SET DEFAULT nextval('public.alert_management_alert_assignees_id_seq'::regclass);
+
 ALTER TABLE ONLY public.alert_management_alerts ALTER COLUMN id SET DEFAULT nextval('public.alert_management_alerts_id_seq'::regclass);
 
 ALTER TABLE ONLY public.alerts_service_data ALTER COLUMN id SET DEFAULT nextval('public.alerts_service_data_id_seq'::regclass);
@@ -7385,6 +7417,8 @@ ALTER TABLE ONLY public.chat_teams ALTER COLUMN id SET DEFAULT nextval('public.c
 
 ALTER TABLE ONLY public.ci_build_needs ALTER COLUMN id SET DEFAULT nextval('public.ci_build_needs_id_seq'::regclass);
 
+ALTER TABLE ONLY public.ci_build_report_results ALTER COLUMN build_id SET DEFAULT nextval('public.ci_build_report_results_build_id_seq'::regclass);
+
 ALTER TABLE ONLY public.ci_build_trace_chunks ALTER COLUMN id SET DEFAULT nextval('public.ci_build_trace_chunks_id_seq'::regclass);
 
 ALTER TABLE ONLY public.ci_build_trace_section_names ALTER COLUMN id SET DEFAULT nextval('public.ci_build_trace_section_names_id_seq'::regclass);
@@ -7958,6 +7992,9 @@ ALTER TABLE ONLY public.zoom_meetings ALTER COLUMN id SET DEFAULT nextval('publi
 ALTER TABLE ONLY public.abuse_reports
     ADD CONSTRAINT abuse_reports_pkey PRIMARY KEY (id);
 
+ALTER TABLE ONLY public.alert_management_alert_assignees
+    ADD CONSTRAINT alert_management_alert_assignees_pkey PRIMARY KEY (id);
+
 ALTER TABLE ONLY public.alert_management_alerts
     ADD CONSTRAINT alert_management_alerts_pkey PRIMARY KEY (id);
 
@@ -8066,6 +8103,9 @@ ALTER TABLE public.lfs_objects
 ALTER TABLE ONLY public.ci_build_needs
     ADD CONSTRAINT ci_build_needs_pkey PRIMARY KEY (id);
 
+ALTER TABLE ONLY public.ci_build_report_results
+    ADD CONSTRAINT ci_build_report_results_pkey PRIMARY KEY (build_id);
+
 ALTER TABLE ONLY public.ci_build_trace_chunks
     ADD CONSTRAINT ci_build_trace_chunks_pkey PRIMARY KEY (id);
 
@@ -9080,6 +9120,10 @@ CREATE UNIQUE INDEX idx_vulnerability_issue_links_on_vulnerability_id_and_link_t
 
 CREATE INDEX index_abuse_reports_on_user_id ON public.abuse_reports USING btree (user_id);
 
+CREATE INDEX index_alert_assignees_on_alert_id ON public.alert_management_alert_assignees USING btree (alert_id);
+
+CREATE UNIQUE INDEX index_alert_assignees_on_user_id_and_alert_id ON public.alert_management_alert_assignees USING btree (user_id, alert_id);
+
 CREATE INDEX index_alert_management_alerts_on_issue_id ON public.alert_management_alerts USING btree (issue_id);
 
 CREATE UNIQUE INDEX index_alert_management_alerts_on_project_id_and_fingerprint ON public.alert_management_alerts USING btree (project_id, fingerprint);
@@ -9230,6 +9274,8 @@ CREATE UNIQUE INDEX index_chat_teams_on_namespace_id ON public.chat_teams USING
 
 CREATE UNIQUE INDEX index_ci_build_needs_on_build_id_and_name ON public.ci_build_needs USING btree (build_id, name);
 
+CREATE INDEX index_ci_build_report_results_on_project_id ON public.ci_build_report_results USING btree (project_id);
+
 CREATE UNIQUE INDEX index_ci_build_trace_chunks_on_build_id_and_chunk_index ON public.ci_build_trace_chunks USING btree (build_id, chunk_index);
 
 CREATE UNIQUE INDEX index_ci_build_trace_section_names_on_project_id_and_name ON public.ci_build_trace_section_names USING btree (project_id, name);
@@ -11651,6 +11697,9 @@ ALTER TABLE ONLY public.events
 ALTER TABLE ONLY public.ip_restrictions
     ADD CONSTRAINT fk_rails_04a93778d5 FOREIGN KEY (group_id) REFERENCES public.namespaces(id) ON DELETE CASCADE;
 
+ALTER TABLE ONLY public.ci_build_report_results
+    ADD CONSTRAINT fk_rails_056d298d48 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
+
 ALTER TABLE ONLY public.ci_daily_build_group_report_results
     ADD CONSTRAINT fk_rails_0667f7608c FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
 
@@ -11720,6 +11769,9 @@ ALTER TABLE ONLY public.diff_note_positions
 ALTER TABLE ONLY public.users_security_dashboard_projects
     ADD CONSTRAINT fk_rails_150cd5682c FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
 
+ALTER TABLE ONLY public.ci_build_report_results
+    ADD CONSTRAINT fk_rails_16cb1ff064 FOREIGN KEY (build_id) REFERENCES public.ci_builds(id) ON DELETE CASCADE;
+
 ALTER TABLE ONLY public.project_deploy_tokens
     ADD CONSTRAINT fk_rails_170e03cbaf FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
 
@@ -12278,6 +12330,9 @@ ALTER TABLE ONLY public.list_user_preferences
 ALTER TABLE ONLY public.board_labels
     ADD CONSTRAINT fk_rails_9374a16edd FOREIGN KEY (board_id) REFERENCES public.boards(id) ON DELETE CASCADE;
 
+ALTER TABLE ONLY public.alert_management_alert_assignees
+    ADD CONSTRAINT fk_rails_93c0f6703b FOREIGN KEY (alert_id) REFERENCES public.alert_management_alerts(id) ON DELETE CASCADE;
+
 ALTER TABLE ONLY public.scim_identities
     ADD CONSTRAINT fk_rails_9421a0bffb FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE;
 
@@ -12548,6 +12603,9 @@ ALTER TABLE ONLY public.group_group_links
 ALTER TABLE ONLY public.vulnerability_issue_links
     ADD CONSTRAINT fk_rails_d459c19036 FOREIGN KEY (vulnerability_id) REFERENCES public.vulnerabilities(id) ON DELETE CASCADE;
 
+ALTER TABLE ONLY public.alert_management_alert_assignees
+    ADD CONSTRAINT fk_rails_d47570ac62 FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE;
+
 ALTER TABLE ONLY public.geo_hashed_storage_attachments_events
     ADD CONSTRAINT fk_rails_d496b088e9 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
 
@@ -13963,6 +14021,9 @@ COPY "schema_migrations" (version) FROM STDIN;
 20200519194042
 20200520103514
 20200521022725
+20200521225327
+20200521225337
+20200521225346
 20200525114553
 20200525121014
 20200526000407
@@ -13972,5 +14033,8 @@ COPY "schema_migrations" (version) FROM STDIN;
 20200526164947
 20200527094322
 20200527095401
+20200527151413
+20200527152116
+20200527152657
 \.
 
@@ -142,6 +142,11 @@ type AdminSidekiqQueuesDeleteJobsPayload {
 Describes an alert from the project's Alert Management
 """
 type AlertManagementAlert {
+  """
+  Assignees of the alert
+  """
+  assignees: [User!]
+
   """
   Timestamp the alert was created
   """
@@ -394,6 +394,28 @@
      "name": "AlertManagementAlert",
      "description": "Describes an alert from the project's Alert Management",
      "fields": [
+        {
+          "name": "assignees",
+          "description": "Assignees of the alert",
+          "args": [
+
+          ],
+          "type": {
+            "kind": "LIST",
+            "name": null,
+            "ofType": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "OBJECT",
+                "name": "User",
+                "ofType": null
+              }
+            }
+          },
+          "isDeprecated": false,
+          "deprecationReason": null
+        },
        {
          "name": "createdAt",
          "description": "Timestamp the alert was created",
@@ -52,6 +52,7 @@ Describes an alert from the project's Alert Management
 
 | Name | Type | Description |
 | --- | ---- | ---------- |
+| `assignees` | User! => Array | Assignees of the alert |
 | `createdAt` | Time | Timestamp the alert was created |
 | `description` | String | Description of the alert |
 | `details` | JSON | Alert details |
@@ -41,17 +41,60 @@ Example response:
 ]
 ```
 
+## Retrieve all repository storage moves for a project
+
+```plaintext
+GET /projects/:project_id/repository_storage_moves
+```
+
+By default, `GET` requests return 20 results at a time because the API results
+are [paginated](README.md#pagination).
+
+Parameters:
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `project_id` | integer | yes | The ID of the project |
+
+Example request:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" 'https://primary.example.com/api/v4/project/1/repository_storage_moves'
+```
+
+Example response:
+
+```json
+[
+  {
+    "id": 1,
+    "created_at": "2020-05-07T04:27:17.234Z",
+    "state": "scheduled",
+    "source_storage_name": "default",
+    "destination_storage_name": "storage2",
+    "project": {
+      "id": 1,
+      "description": null,
+      "name": "project1",
+      "name_with_namespace": "John Doe2 / project1",
+      "path": "project1",
+      "path_with_namespace": "namespace1/project1",
+      "created_at": "2020-05-07T04:27:17.016Z"
+    }
+  }
+]
+```
+
 ## Get a single project repository storage move
 
 ```plaintext
-GET /project_repository_storage_moves/:id
+GET /project_repository_storage_moves/:repository_storage_id
 ```
 
 Parameters:
 
 | Attribute | Type | Required | Description |
 | --------- | ---- | -------- | ----------- |
-| `id` | integer | yes | The ID of the project repository storage move |
+| `repository_storage_id` | integer | yes | The ID of the project repository storage move |
 
 Example request:
 
@@ -78,3 +121,42 @@ Example response:
     "created_at": "2020-05-07T04:27:17.016Z"
   }
 }
 ```
 
+## Get a single repository storage move for a project
+
+```plaintext
+GET /project/:project_id/repository_storage_moves/:repository_storage_id
+```
+
+Parameters:
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `project_id` | integer | yes | The ID of the project |
+| `repository_storage_id` | integer | yes | The ID of the project repository storage move |
+
+Example request:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" 'https://primary.example.com/api/v4/project/1/repository_storage_moves/1'
+```
+
+Example response:
+
+```json
+{
+  "id": 1,
+  "created_at": "2020-05-07T04:27:17.234Z",
+  "state": "scheduled",
+  "source_storage_name": "default",
+  "destination_storage_name": "storage2",
+  "project": {
+    "id": 1,
+    "description": null,
+    "name": "project1",
+    "name_with_namespace": "John Doe2 / project1",
+    "path": "project1",
+    "path_with_namespace": "namespace1/project1",
+    "created_at": "2020-05-07T04:27:17.016Z"
+  }
+}
+```
@@ -301,6 +301,16 @@ https://example.gitlab.com/<namespace>/<project>/badges/<branch>/coverage.svg?st
 
 ![Badge flat square style](img/project_overview_badges_flat_square.png)
 
+### Custom badge text
+
+The text for a badge can be customized. This can be useful to differentiate between multiple coverage jobs that run in the same pipeline. Customize the badge text and width by adding the `key_text=custom_text` and `key_width=custom_key_width` parameters to the URL:
+
+```plaintext
+https://gitlab.com/gitlab-org/gitlab-foss/badges/master/coverage.svg?job=karma&key_text=Frontend+Coverage&key_width=100
+```
+
+![Badge with custom text and width](https://gitlab.com/gitlab-org/gitlab-foss/badges/master/coverage.svg?job=karma&key_text=Frontend+Coverage&key_width=100)
+
 ## Environment Variables
 
 [Environment variables](../variables/README.md#gitlab-cicd-environment-variables) can be set in an environment to be available to a runner.
@@ -404,9 +404,8 @@ script:
 
 You can define per-project or per-group variables
 that are set in the pipeline environment. Group-level variables are stored out of
-the repository (not in `.gitlab-ci.yml`) and are securely passed to GitLab Runner
-making them available during a pipeline run. It's the **recommended method** to
-use for storing things like passwords, SSH keys, and credentials.
+the repository (not in `.gitlab-ci.yml`) and are securely passed to GitLab Runner,
+which makes them available during a pipeline run. For Premium users who do **not** use an external key store or who use GitLab's [integration with HashiCorp Vault](../examples/authenticating-with-hashicorp-vault/index.md), we recommend using group environment variables to store secrets like passwords, SSH keys, and credentials.
 
 Group-level variables can be added by:
 
@@ -325,6 +325,9 @@ tenses, words, and phrases:
 - Avoid using the word *currently* when talking about the product or its
   features. The documentation describes the product as it is, and not as it
   will be at some indeterminate point in the future.
+- Don't use profanity or obscenities. Doing so may negatively affect other
+  users and contributors, which is contrary to GitLab's value of
+  [diversity and inclusion](https://about.gitlab.com/handbook/values/#diversity-inclusion).
 
 ### Word usage clarifications
 
@@ -641,7 +641,7 @@ recommended that you configure the appropriate retention policy for your object
 storage (for example, [AWS S3](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/create-lifecycle.html)).
 
 You may want to set a limited lifetime for backups to prevent regular
-backups using all your disk space.
+backups using all your disk space. The next time the backup task is run, backups older than the `backup_keep_time` will be pruned.
 
 For Omnibus GitLab packages:
 
@@ -24,8 +24,42 @@ module API
        detail 'This feature was introduced in GitLab 13.0.'
        success Entities::ProjectRepositoryStorageMove
      end
-      get ':id' do
-        storage_move = ProjectRepositoryStorageMove.find(params[:id])
+      params do
+        requires :repository_storage_move_id, type: Integer, desc: 'The ID of a project repository storage move'
+      end
+      get ':repository_storage_move_id' do
+        storage_move = ProjectRepositoryStorageMove.find(params[:repository_storage_move_id])
 
        present storage_move, with: Entities::ProjectRepositoryStorageMove, current_user: current_user
      end
    end
+
+    params do
+      requires :id, type: String, desc: 'The ID of a project'
+    end
+    resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+      desc 'Get a list of all project repository storage moves' do
+        detail 'This feature was introduced in GitLab 13.1.'
+        success Entities::ProjectRepositoryStorageMove
+      end
+      params do
+        use :pagination
+      end
+      get ':id/repository_storage_moves' do
+        storage_moves = user_project.repository_storage_moves.with_projects.order_created_at_desc
+
+        present paginate(storage_moves), with: Entities::ProjectRepositoryStorageMove, current_user: current_user
+      end
+
+      desc 'Get a project repository storage move' do
+        detail 'This feature was introduced in GitLab 13.1.'
+        success Entities::ProjectRepositoryStorageMove
+      end
+      params do
+        requires :repository_storage_move_id, type: Integer, desc: 'The ID of a project repository storage move'
+      end
+      get ':id/repository_storage_moves/:repository_storage_move_id' do
+        storage_move = user_project.repository_storage_moves.find(params[:repository_storage_move_id])
+
+        present storage_move, with: Entities::ProjectRepositoryStorageMove, current_user: current_user
+      end
@@ -8,21 +8,24 @@ module Gitlab
 
      delegate :subject_class, to: :stage
 
-      # rubocop: disable CodeReuse/ActiveRecord
+      FINDER_CLASSES = {
+        MergeRequest.to_s => MergeRequestsFinder,
+        Issue.to_s => IssuesFinder
+      }.freeze
 
      def initialize(stage:, params: {})
        @stage = stage
-        @params = params
+        @params = build_finder_params(params)
      end
 
+      # rubocop: disable CodeReuse/ActiveRecord
      def build
-        query = subject_class
-        query = filter_by_parent_model(query)
-        query = filter_by_time_range(query)
+        query = finder.execute
        query = stage.start_event.apply_query_customization(query)
        query = stage.end_event.apply_query_customization(query)
        query.where(duration_condition)
      end
+      # rubocop: enable CodeReuse/ActiveRecord
 
      private
 
@@ -32,38 +35,33 @@ module Gitlab
        stage.end_event.timestamp_projection.gteq(stage.start_event.timestamp_projection)
      end
 
-      def filter_by_parent_model(query)
-        if parent_class.eql?(Project)
-          if subject_class.eql?(Issue)
-            query.where(project_id: stage.parent_id)
-          elsif subject_class.eql?(MergeRequest)
-            query.where(target_project_id: stage.parent_id)
-          else
-            raise ArgumentError, "unknown subject_class: #{subject_class}"
-          end
-        else
-          raise ArgumentError, "unknown parent_class: #{parent_class}"
-        end
-      end
-
-      def filter_by_time_range(query)
-        from = params.fetch(:from, 30.days.ago)
-        to = params[:to]
-
-        query = query.where(subject_table[:created_at].gteq(from))
-        query = query.where(subject_table[:created_at].lteq(to)) if to
-        query
-      end
-
-      def subject_table
-        subject_class.arel_table
+      def finder
+        FINDER_CLASSES.fetch(subject_class.to_s).new(params[:current_user], params)
      end
 
      def parent_class
        stage.parent.class
      end
 
-      # rubocop: enable CodeReuse/ActiveRecord
+      def build_finder_params(params)
+        {}.tap do |finder_params|
+          finder_params[:current_user] = params[:current_user]
+
+          add_parent_model_params!(finder_params)
+          add_time_range_params!(finder_params, params[:from], params[:to])
+        end
+      end
+
+      def add_parent_model_params!(finder_params)
+        raise(ArgumentError, "unknown parent_class: #{parent_class}") unless parent_class.eql?(Project)
+
+        finder_params[:project_id] = stage.parent_id
+      end
+
+      def add_time_range_params!(finder_params, from, to)
+        finder_params[:created_after] = from || 30.days.ago
+        finder_params[:created_before] = to if to
+      end
    end
  end
end
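A rough sketch of what `build_finder_params` now assembles for a project-level stage (values illustrative); the finder classes then apply the equivalent of the old `filter_by_parent_model` and `filter_by_time_range` steps:

```ruby
# Approximate shape of the params handed to IssuesFinder/MergeRequestsFinder.
{
  current_user: user,
  project_id: stage.parent_id,
  created_after: params[:from] || 30.days.ago,
  created_before: params[:to] # only set when params[:to] is present
}
```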
@@ -11,12 +11,14 @@ module Gitlab
        @query = query
      end
 
+      # rubocop: disable CodeReuse/ActiveRecord
      def seconds
-        @query = @query.select(median_duration_in_seconds.as('median'))
+        @query = @query.select(median_duration_in_seconds.as('median')).reorder(nil)
        result = execute_query(@query).first || {}
 
        result['median'] || nil
      end
+      # rubocop: enable CodeReuse/ActiveRecord
 
      def days
        seconds ? seconds.fdiv(1.day) : nil
@@ -12,13 +12,11 @@ module Gitlab
 
      MAPPINGS = {
        Issue => {
-          finder_class: IssuesFinder,
          serializer_class: AnalyticsIssueSerializer,
          includes_for_query: { project: [:namespace], author: [] },
          columns_for_select: %I[title iid id created_at author_id project_id]
        },
        MergeRequest => {
-          finder_class: MergeRequestsFinder,
          serializer_class: AnalyticsMergeRequestSerializer,
          includes_for_query: { target_project: [:namespace], author: [] },
          columns_for_select: %I[title iid id created_at author_id state_id target_project_id]
@@ -56,27 +54,12 @@ module Gitlab
 
      attr_reader :stage, :query, :params
 
-      def finder_query
-        MAPPINGS
-          .fetch(subject_class)
-          .fetch(:finder_class)
-          .new(params.fetch(:current_user), finder_params.fetch(stage.parent.class))
-          .execute
-      end
-
      def columns
        MAPPINGS.fetch(subject_class).fetch(:columns_for_select).map do |column_name|
          subject_class.arel_table[column_name]
        end
      end
 
-      # EE will override this to include Group rules
-      def finder_params
-        {
-          Project => { project_id: stage.parent_id }
-        }
-      end
-
      def default_test_stage?
        stage.matches_with_stage_params?(Gitlab::Analytics::CycleAnalytics::DefaultStages.params_for_test_stage)
      end
@@ -113,8 +96,7 @@ module Gitlab
      end
 
      def records
-        results = finder_query
-          .merge(ordered_and_limited_query)
+        results = ordered_and_limited_query
          .select(*columns, round_duration_to_seconds.as('total_time'))
 
        # using preloader instead of includes to avoid AR generating a large column list
@@ -7,12 +7,16 @@ module Gitlab
      # Test coverage report badge
      #
      class Report < Badge::Base
-        attr_reader :project, :ref, :job
+        attr_reader :project, :ref, :job, :customization
 
-        def initialize(project, ref, job = nil)
+        def initialize(project, ref, opts: { job: nil })
          @project = project
          @ref = ref
-          @job = job
+          @job = opts[:job]
+          @customization = {
+            key_width: opts[:key_width].to_i,
+            key_text: opts[:key_text]
+          }
 
          @pipeline = @project.ci_pipelines.latest_successful_for_ref(@ref)
        end
@@ -20,10 +20,16 @@ module Gitlab
        def initialize(badge)
          @entity = badge.entity
          @status = badge.status
+          @key_text = badge.customization.dig(:key_text)
+          @key_width = badge.customization.dig(:key_width)
        end
 
        def key_text
-          @entity.to_s
+          if @key_text && @key_text.size <= MAX_KEY_SIZE
+            @key_text
+          else
+            @entity.to_s
+          end
        end
 
        def value_text
@@ -31,7 +37,11 @@ module Gitlab
        end
 
        def key_width
-          62
+          if @key_width && @key_width.between?(1, MAX_KEY_SIZE)
+            @key_width
+          else
+            62
+          end
        end
 
        def value_width
@@ -7,11 +7,15 @@ module Gitlab
      # Pipeline status badge
      #
      class Status < Badge::Base
-        attr_reader :project, :ref
+        attr_reader :project, :ref, :customization
 
-        def initialize(project, ref)
+        def initialize(project, ref, opts: {})
          @project = project
          @ref = ref
+          @customization = {
+            key_width: opts[:key_width].to_i,
+            key_text: opts[:key_text]
+          }
 
          @sha = @project.commit(@ref).try(:sha)
        end
@@ -24,10 +24,16 @@ module Gitlab
        def initialize(badge)
          @entity = badge.entity
          @status = badge.status
+          @key_text = badge.customization.dig(:key_text)
+          @key_width = badge.customization.dig(:key_width)
        end
 
        def key_text
-          @entity.to_s
+          if @key_text && @key_text.size <= MAX_KEY_SIZE
+            @key_text
+          else
+            @entity.to_s
+          end
        end
 
        def value_text
@@ -35,7 +41,11 @@ module Gitlab
        end
 
        def key_width
-          62
+          if @key_width && @key_width.between?(1, MAX_KEY_SIZE)
+            @key_width
+          else
+            62
+          end
        end
 
        def value_width
@@ -6,6 +6,8 @@ module Gitlab
    # Abstract template class for badges
    #
    class Template
+      MAX_KEY_SIZE = 128
+
      def initialize(badge)
        @entity = badge.entity
        @status = badge.status
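A console sketch of the fallback rules the templates now apply; the project is a placeholder and the returned entity text is an assumption:

```ruby
badge = Gitlab::Badge::Pipeline::Status.new(project, 'master',
                                            opts: { key_text: 'x' * 200, key_width: '0' })
template = Gitlab::Badge::Pipeline::Template.new(badge)

template.key_text  # => the badge entity (e.g. "pipeline"): text over MAX_KEY_SIZE (128) is ignored
template.key_width # => 62: a width outside 1..MAX_KEY_SIZE falls back to the default
```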
@@ -3,120 +3,8 @@
 module Gitlab
   module Database
     module PartitioningMigrationHelpers
-      include SchemaHelpers
-
-      def add_partitioned_foreign_key(from_table, to_table, column: nil, primary_key: :id, on_delete: :cascade)
-        cascade_delete = extract_cascade_option(on_delete)
-
-        update_foreign_keys(from_table, to_table, column, primary_key, cascade_delete) do |current_keys, existing_key, specified_key|
-          if existing_key.nil?
-            unless specified_key.save
-              raise "failed to create foreign key: #{specified_key.errors.full_messages.to_sentence}"
-            end
-
-            current_keys << specified_key
-          else
-            Rails.logger.warn "foreign key not added because it already exists: #{specified_key}" # rubocop:disable Gitlab/RailsLogger
-            current_keys
-          end
-        end
-      end
-
-      def remove_partitioned_foreign_key(from_table, to_table, column: nil, primary_key: :id)
-        update_foreign_keys(from_table, to_table, column, primary_key) do |current_keys, existing_key, specified_key|
-          if existing_key
-            existing_key.destroy!
-            current_keys.delete(existing_key)
-          else
-            Rails.logger.warn "foreign key not removed because it doesn't exist: #{specified_key}" # rubocop:disable Gitlab/RailsLogger
-          end
-
-          current_keys
-        end
-      end
-
-      def fk_function_name(table)
-        object_name(table, 'fk_cascade_function')
-      end
-
-      def fk_trigger_name(table)
-        object_name(table, 'fk_cascade_trigger')
-      end
-
-      private
-
-      def fk_from_spec(from_table, to_table, from_column, to_column, cascade_delete)
-        PartitionedForeignKey.new(from_table: from_table.to_s, to_table: to_table.to_s, from_column: from_column.to_s,
-                                  to_column: to_column.to_s, cascade_delete: cascade_delete)
-      end
-
-      def update_foreign_keys(from_table, to_table, from_column, to_column, cascade_delete = nil)
-        if transaction_open?
-          raise 'partitioned foreign key operations can not be run inside a transaction block, ' \
-            'you can disable transaction blocks by calling disable_ddl_transaction! ' \
-            'in the body of your migration class'
-        end
-
-        from_column ||= "#{to_table.to_s.singularize}_id"
-        specified_key = fk_from_spec(from_table, to_table, from_column, to_column, cascade_delete)
-
-        current_keys = PartitionedForeignKey.by_referenced_table(to_table).to_a
-        existing_key = find_existing_key(current_keys, specified_key)
-
-        final_keys = yield current_keys, existing_key, specified_key
-
-        fn_name = fk_function_name(to_table)
-        trigger_name = fk_trigger_name(to_table)
-
-        with_lock_retries do
-          drop_trigger(to_table, trigger_name, if_exists: true)
-
-          if final_keys.empty?
-            drop_function(fn_name, if_exists: true)
-          else
-            create_or_replace_fk_function(fn_name, final_keys)
-            create_function_trigger(trigger_name, fn_name, fires: "AFTER DELETE ON #{to_table}")
-          end
-        end
-      end
-
-      def extract_cascade_option(on_delete)
-        case on_delete
-        when :cascade then true
-        when :nullify then false
-        else raise ArgumentError, "invalid option #{on_delete} for :on_delete"
-        end
-      end
-
-      def with_lock_retries(&block)
-        Gitlab::Database::WithLockRetries.new({
-          klass: self.class,
-          logger: Gitlab::BackgroundMigration::Logger
-        }).run(&block)
-      end
-
-      def find_existing_key(keys, key)
-        keys.find { |k| k.from_table == key.from_table && k.from_column == key.from_column }
-      end
-
-      def create_or_replace_fk_function(fn_name, fk_specs)
-        create_trigger_function(fn_name, replace: true) do
-          cascade_statements = build_cascade_statements(fk_specs)
-          cascade_statements << 'RETURN OLD;'
-
-          cascade_statements.join("\n")
-        end
-      end
-
-      def build_cascade_statements(foreign_keys)
-        foreign_keys.map do |fks|
-          if fks.cascade_delete?
-            "DELETE FROM #{fks.from_table} WHERE #{fks.from_column} = OLD.#{fks.to_column};"
-          else
-            "UPDATE #{fks.from_table} SET #{fks.from_column} = NULL WHERE #{fks.from_column} = OLD.#{fks.to_column};"
-          end
-        end
-      end
+      include ForeignKeyHelpers
+      include TableManagementHelpers
     end
   end
 end
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+module Gitlab
+  module Database
+    module PartitioningMigrationHelpers
+      module ForeignKeyHelpers
+        include ::Gitlab::Database::SchemaHelpers
+
+        # Creates a "foreign key" that references a partitioned table. Because foreign keys referencing partitioned
+        # tables are not supported in PG11, this does not create a true database foreign key, but instead implements the
+        # same functionality at the database level by using triggers.
+        #
+        # Example:
+        #
+        #   add_partitioned_foreign_key :issues, :projects
+        #
+        # Available options:
+        #
+        #   :column - name of the referencing column (otherwise inferred from the referenced table name)
+        #   :primary_key - name of the primary key in the referenced table (defaults to id)
+        #   :on_delete - supports either :cascade for ON DELETE CASCADE or :nullify for ON DELETE SET NULL
+        #
+        def add_partitioned_foreign_key(from_table, to_table, column: nil, primary_key: :id, on_delete: :cascade)
+          cascade_delete = extract_cascade_option(on_delete)
+
+          update_foreign_keys(from_table, to_table, column, primary_key, cascade_delete) do |current_keys, existing_key, specified_key|
+            if existing_key.nil?
+              unless specified_key.save
+                raise "failed to create foreign key: #{specified_key.errors.full_messages.to_sentence}"
+              end
+
+              current_keys << specified_key
+            else
+              Rails.logger.warn "foreign key not added because it already exists: #{specified_key}" # rubocop:disable Gitlab/RailsLogger
+              current_keys
+            end
+          end
+        end
+
+        # Drops a "foreign key" that references a partitioned table. This method ONLY applies to foreign keys previously
+        # created through the `add_partitioned_foreign_key` method. Standard database foreign keys should be managed
+        # through the familiar Rails helpers.
+        #
+        # Example:
+        #
+        #   remove_partitioned_foreign_key :issues, :projects
+        #
+        # Available options:
+        #
+        #   :column - name of the referencing column (otherwise inferred from the referenced table name)
+        #   :primary_key - name of the primary key in the referenced table (defaults to id)
+        #
+        def remove_partitioned_foreign_key(from_table, to_table, column: nil, primary_key: :id)
+          update_foreign_keys(from_table, to_table, column, primary_key) do |current_keys, existing_key, specified_key|
+            if existing_key
+              existing_key.delete
+              current_keys.delete(existing_key)
+            else
+              Rails.logger.warn "foreign key not removed because it doesn't exist: #{specified_key}" # rubocop:disable Gitlab/RailsLogger
+            end
+
+            current_keys
+          end
+        end
+
+        private
+
+        def fk_function_name(table)
+          object_name(table, 'fk_cascade_function')
+        end
+
+        def fk_trigger_name(table)
+          object_name(table, 'fk_cascade_trigger')
+        end
+
+        def fk_from_spec(from_table, to_table, from_column, to_column, cascade_delete)
+          PartitionedForeignKey.new(from_table: from_table.to_s, to_table: to_table.to_s, from_column: from_column.to_s,
+                                    to_column: to_column.to_s, cascade_delete: cascade_delete)
+        end
+
+        def update_foreign_keys(from_table, to_table, from_column, to_column, cascade_delete = nil)
+          if transaction_open?
+            raise 'partitioned foreign key operations can not be run inside a transaction block, ' \
+              'you can disable transaction blocks by calling disable_ddl_transaction! ' \
+              'in the body of your migration class'
+          end
+
+          from_column ||= "#{to_table.to_s.singularize}_id"
+          specified_key = fk_from_spec(from_table, to_table, from_column, to_column, cascade_delete)
+
+          current_keys = PartitionedForeignKey.by_referenced_table(to_table).to_a
+          existing_key = find_existing_key(current_keys, specified_key)
+
+          final_keys = yield current_keys, existing_key, specified_key
+
+          fn_name = fk_function_name(to_table)
+          trigger_name = fk_trigger_name(to_table)
+
+          with_lock_retries do
+            drop_trigger(to_table, trigger_name, if_exists: true)
+
+            if final_keys.empty?
+              drop_function(fn_name, if_exists: true)
+            else
+              create_or_replace_fk_function(fn_name, final_keys)
+              create_function_trigger(trigger_name, fn_name, fires: "AFTER DELETE ON #{to_table}")
+            end
+          end
+        end
+
+        def extract_cascade_option(on_delete)
+          case on_delete
+          when :cascade then true
+          when :nullify then false
+          else raise ArgumentError, "invalid option #{on_delete} for :on_delete"
+          end
+        end
+
+        def with_lock_retries(&block)
+          Gitlab::Database::WithLockRetries.new({
+            klass: self.class,
+            logger: Gitlab::BackgroundMigration::Logger
+          }).run(&block)
+        end
+
+        def find_existing_key(keys, key)
+          keys.find { |k| k.from_table == key.from_table && k.from_column == key.from_column }
+        end
+
+        def create_or_replace_fk_function(fn_name, fk_specs)
+          create_trigger_function(fn_name, replace: true) do
+            cascade_statements = build_cascade_statements(fk_specs)
+            cascade_statements << 'RETURN OLD;'
+
+            cascade_statements.join("\n")
+          end
+        end
+
+        def build_cascade_statements(foreign_keys)
+          foreign_keys.map do |fks|
+            if fks.cascade_delete?
+              "DELETE FROM #{fks.from_table} WHERE #{fks.from_column} = OLD.#{fks.to_column};"
+            else
+              "UPDATE #{fks.from_table} SET #{fks.from_column} = NULL WHERE #{fks.from_column} = OLD.#{fks.to_column};"
+            end
+          end
+        end
+      end
+    end
+  end
+end
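A hypothetical migration using the helper (table names are illustrative); per the helper's own guard, these operations cannot run inside a transaction block:

```ruby
class AddPartitionedFkExample < ActiveRecord::Migration[6.0]
  include Gitlab::Database::PartitioningMigrationHelpers

  disable_ddl_transaction!

  def up
    add_partitioned_foreign_key :audit_event_details, :audit_events_part, on_delete: :cascade
  end

  def down
    remove_partitioned_foreign_key :audit_event_details, :audit_events_part
  end
end
```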
@@ -0,0 +1,102 @@
# frozen_string_literal: true

module Gitlab
  module Database
    module PartitioningMigrationHelpers
      module TableManagementHelpers
        include SchemaHelpers

        # Creates a partitioned copy of an existing table, using a RANGE partitioning strategy on a timestamp column.
        # One partition is created per month between the given `min_date` and `max_date`.
        #
        # A copy of the original table is required as PG currently does not support partitioning existing tables.
        #
        # Example:
        #
        #   partition_table_by_date :audit_events, :created_at, min_date: Date.new(2020, 1), max_date: Date.new(2020, 6)
        #
        # Required options are:
        #   :min_date - a date specifying the lower bounds of the partition range
        #   :max_date - a date specifying the upper bounds of the partitioning range
        #
        def partition_table_by_date(table_name, column_name, min_date:, max_date:)
          raise "max_date #{max_date} must be greater than min_date #{min_date}" if min_date >= max_date

          primary_key = connection.primary_key(table_name)
          raise "primary key not defined for #{table_name}" if primary_key.nil?

          partition_column = find_column_definition(table_name, column_name)
          raise "partition column #{column_name} does not exist on #{table_name}" if partition_column.nil?

          new_table_name = partitioned_table_name(table_name)
          create_range_partitioned_copy(new_table_name, table_name, partition_column, primary_key)
          create_daterange_partitions(new_table_name, partition_column.name, min_date, max_date)
        end

        # Clean up a partitioned copy of an existing table. This deletes the partitioned table and all partitions.
        #
        # Example:
        #
        #   drop_partitioned_table_for :audit_events
        #
        def drop_partitioned_table_for(table_name)
          drop_table(partitioned_table_name(table_name))
        end

        private

        def partitioned_table_name(table)
          tmp_table_name("#{table}_part")
        end

        def find_column_definition(table, column)
          connection.columns(table).find { |c| c.name == column.to_s }
        end

        def create_range_partitioned_copy(table_name, template_table_name, partition_column, primary_key)
          tmp_column_name = object_name(partition_column.name, 'partition_key')

          execute(<<~SQL)
            CREATE TABLE #{table_name} (
              LIKE #{template_table_name} INCLUDING ALL EXCLUDING INDEXES,
              #{tmp_column_name} #{partition_column.sql_type} NOT NULL,
              PRIMARY KEY (#{[primary_key, tmp_column_name].join(", ")})
            ) PARTITION BY RANGE (#{tmp_column_name})
          SQL

          remove_column(table_name, partition_column.name)
          rename_column(table_name, tmp_column_name, partition_column.name)
          change_column_default(table_name, primary_key, nil)
        end

        def create_daterange_partitions(table_name, column_name, min_date, max_date)
          min_date = min_date.beginning_of_month.to_date
          max_date = max_date.next_month.beginning_of_month.to_date

          create_range_partition("#{table_name}_000000", table_name, 'MINVALUE', to_sql_date_literal(min_date))

          while min_date < max_date
            partition_name = "#{table_name}_#{min_date.strftime('%Y%m')}"
            next_date = min_date.next_month
            lower_bound = to_sql_date_literal(min_date)
            upper_bound = to_sql_date_literal(next_date)

            create_range_partition(partition_name, table_name, lower_bound, upper_bound)
            min_date = next_date
          end
        end

        def to_sql_date_literal(date)
          connection.quote(date.strftime('%Y-%m-%d'))
        end

        def create_range_partition(partition_name, table_name, lower_bound, upper_bound)
          execute(<<~SQL)
            CREATE TABLE #{partition_name} PARTITION OF #{table_name}
            FOR VALUES FROM (#{lower_bound}) TO (#{upper_bound})
          SQL
        end
      end
    end
  end
end
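
For orientation, a sketch of how a migration might call these helpers; the migration class name and date range are illustrative assumptions, not taken from this commit:

# Hypothetical migration using the helpers above.
class PartitionAuditEvents < ActiveRecord::Migration[6.0]
  include Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers

  def up
    # Creates the partitioned copy, a catch-all partition for rows older than
    # min_date, and one partition per month up to max_date.
    partition_table_by_date :audit_events, :created_at,
      min_date: Date.new(2020, 1), max_date: Date.new(2020, 6)
  end

  def down
    drop_partitioned_table_for :audit_events
  end
end
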
@@ -35,6 +35,12 @@ module Gitlab
        execute("DROP TRIGGER #{exists_clause} #{name} ON #{table_name}")
      end

      def tmp_table_name(base)
        hashed_base = Digest::SHA256.hexdigest(base).first(10)

        "#{base}_#{hashed_base}"
      end

      def object_name(table, type)
        identifier = "#{table}_#{type}"
        hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
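
The truncated digest above keeps generated object names stable and short; a quick illustration (the identifier value is an example only):

require 'digest'

identifier = 'audit_events_partition_key'
# First 10 hex characters of the SHA-256 digest, as in object_name above.
hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
"#{identifier}_#{hashed_identifier}"
# => "audit_events_partition_key_<10 hex chars>", comfortably under
#    PostgreSQL's 63-byte identifier limit for inputs like this.
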
@@ -1801,6 +1801,9 @@ msgstr ""
msgid "AlertManagement|Assign status"
msgstr ""

msgid "AlertManagement|Assignees"
msgstr ""

msgid "AlertManagement|Authorize external service"
msgstr ""

@@ -1894,6 +1897,9 @@ msgstr ""
msgid "AlertManagement|Triggered"
msgstr ""

msgid "AlertManagement|Unassigned"
msgstr ""

msgid "AlertManagement|Unknown"
msgstr ""
@@ -54,7 +54,7 @@ describe Projects::BadgesController do

    context 'when style param is set to `flat`' do
      it 'renders the `flat` badge layout' do
        get_badge(badge_type, 'flat')
        get_badge(badge_type, style: 'flat')

        expect(response).to render_template('projects/badges/badge')
      end

@@ -62,7 +62,7 @@ describe Projects::BadgesController do

    context 'when style param is set to an invalid type' do
      it 'renders the `flat` (default) badge layout' do
        get_badge(badge_type, 'xxx')
        get_badge(badge_type, style: 'xxx')

        expect(response).to render_template('projects/badges/badge')
      end

@@ -70,7 +70,7 @@ describe Projects::BadgesController do

    context 'when style param is set to `flat-square`' do
      it 'renders the `flat-square` badge layout' do
        get_badge(badge_type, 'flat-square')
        get_badge(badge_type, style: 'flat-square')

        expect(response).to render_template('projects/badges/badge_flat-square')
      end

@@ -102,12 +102,37 @@ describe Projects::BadgesController do
      end

      it 'defaults to project permissions' do
        get_badge(:coverage)
        get_badge(badge_type)

        expect(response).to have_gitlab_http_status(:not_found)
      end
    end
  end

  context 'customization' do
    render_views

    before do
      project.add_maintainer(user)
      sign_in(user)
    end

    context 'when key_text param is used' do
      it 'sets custom key text' do
        get_badge(badge_type, key_text: 'custom key text')

        expect(response.body).to include('custom key text')
      end
    end

    context 'when key_width param is used' do
      it 'sets custom key width' do
        get_badge(badge_type, key_width: '123')

        expect(response.body).to include('123')
      end
    end
  end
end

describe '#pipeline' do

@@ -118,13 +143,12 @@ describe Projects::BadgesController do
    it_behaves_like 'a badge resource', :coverage
  end

  def get_badge(badge, style = nil)
  def get_badge(badge, args = {})
    params = {
      namespace_id: project.namespace.to_param,
      project_id: project,
      ref: pipeline.ref,
      style: style
    }
      ref: pipeline.ref
    }.merge(args.slice(:style, :key_text, :key_width))

    get badge, params: params, format: :svg
  end
@@ -19,6 +19,12 @@ FactoryBot.define do
      issue
    end

    trait :with_assignee do |alert|
      after(:create) do |alert|
        alert.alert_assignees.create(assignee: create(:user))
      end
    end

    trait :with_fingerprint do
      fingerprint { SecureRandom.hex }
    end

@@ -77,6 +83,7 @@ FactoryBot.define do

    trait :all_fields do
      with_issue
      with_assignee
      with_fingerprint
      with_service
      with_monitoring_tool
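
Illustrative spec usage of the new trait, mirroring the factory name used in the specs later in this commit:

alert = create(:alert_management_alert, :with_assignee)
alert.assignees.count # => 1, via the after(:create) hook above
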
@@ -0,0 +1,35 @@
# frozen_string_literal: true

FactoryBot.define do
  factory :ci_build_report_result, class: 'Ci::BuildReportResult' do
    build factory: :ci_build
    project factory: :project
    data do
      {
        junit: {
          name: "rspec",
          duration: 0.42,
          failed: 0,
          errored: 2,
          skipped: 0,
          success: 0
        }
      }
    end

    trait :with_junit_success do
      data do
        {
          junit: {
            name: "rspec",
            duration: 0.42,
            failed: 0,
            errored: 0,
            skipped: 0,
            success: 2
          }
        }
      end
    end
  end
end
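
Illustrative usage, mirroring the model and validator specs later in this commit:

build(:ci_build_report_result)                      # JUnit data with 2 errored tests
build(:ci_build_report_result, :with_junit_success) # JUnit data with 2 passing tests
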
@@ -8,7 +8,7 @@ import {
  GlDropdownItem,
  GlIcon,
  GlTab,
  GlDeprecatedBadge as GlBadge,
  GlBadge,
} from '@gitlab/ui';
import { visitUrl } from '~/lib/utils/url_utility';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';

@@ -42,6 +42,7 @@ describe('AlertManagementList', () => {
  const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
  const findDateFields = () => wrapper.findAll(TimeAgo);
  const findFirstStatusOption = () => findStatusDropdown().find(GlDropdownItem);
  const findAssignees = () => wrapper.findAll('[data-testid="assigneesField"]');
  const findSeverityFields = () => wrapper.findAll('[data-testid="severityField"]');
  const findSeverityColumnHeader = () => wrapper.findAll('th').at(0);
@@ -235,6 +236,34 @@ describe('AlertManagementList', () => {
      ).toBe('Critical');
    });

    it('renders Unassigned when no assignee(s) present', () => {
      mountComponent({
        props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
        data: { alerts: mockAlerts, alertsCount, errored: false },
        loading: false,
      });

      expect(
        findAssignees()
          .at(0)
          .text(),
      ).toBe('Unassigned');
    });

    it('renders username(s) when assignee(s) present', () => {
      mountComponent({
        props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
        data: { alerts: mockAlerts, alertsCount, errored: false },
        loading: false,
      });

      expect(
        findAssignees()
          .at(1)
          .text(),
      ).toBe(mockAlerts[1].assignees[0].username);
    });

    it('navigates to the detail page when alert row is clicked', () => {
      mountComponent({
        props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
@@ -7,7 +7,8 @@
    "createdAt": "2020-04-17T23:18:14.996Z",
    "startedAt": "2020-04-17T23:18:14.996Z",
    "endedAt": "2020-04-17T23:18:14.996Z",
    "status": "TRIGGERED"
    "status": "TRIGGERED",
    "assignees": []
  },
  {
    "iid": "1527543",

@@ -16,7 +17,8 @@
    "eventCount": 1,
    "startedAt": "2020-04-17T23:18:14.996Z",
    "endedAt": "2020-04-17T23:18:14.996Z",
    "status": "ACKNOWLEDGED"
    "status": "ACKNOWLEDGED",
    "assignees": [{ "username": "root" }]
  },
  {
    "iid": "1527544",

@@ -25,6 +27,7 @@
    "eventCount": 4,
    "startedAt": "2020-04-17T23:18:14.996Z",
    "endedAt": "2020-04-17T23:18:14.996Z",
    "status": "RESOLVED"
    "status": "RESOLVED",
    "assignees": [{ "username": "root" }]
  }
]
@@ -132,6 +132,8 @@ exports[`Dashboard template matches the default snapshot 1`] = `

<!---->

<!---->

<empty-state-stub
  clusterspath="/path/to/clusters"
  documentationpath="/path/to/docs"
@@ -0,0 +1,64 @@
import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import { createStore } from '~/monitoring/stores';
import LinksSection from '~/monitoring/components/links_section.vue';

describe('Links Section component', () => {
  let store;
  let wrapper;

  const createShallowWrapper = () => {
    wrapper = shallowMount(LinksSection, {
      store,
    });
  };
  const setState = links => {
    store.state.monitoringDashboard = {
      ...store.state.monitoringDashboard,
      showEmptyState: false,
      links,
    };
  };
  const findLinks = () => wrapper.findAll(GlLink);

  beforeEach(() => {
    store = createStore();
    createShallowWrapper();
  });

  it('does not render a section if no links are present', () => {
    setState();

    return wrapper.vm.$nextTick(() => {
      expect(findLinks()).not.toExist();
    });
  });

  it('renders a link inside a section', () => {
    setState([
      {
        title: 'GitLab Website',
        url: 'https://gitlab.com',
      },
    ]);

    return wrapper.vm.$nextTick(() => {
      expect(findLinks()).toHaveLength(1);
      const firstLink = findLinks().at(0);

      expect(firstLink.attributes('href')).toBe('https://gitlab.com');
      expect(firstLink.text()).toBe('GitLab Website');
    });
  });

  it('renders multiple links inside a section', () => {
    const links = new Array(10)
      .fill(null)
      .map((_, i) => ({ title: `Title ${i}`, url: `https://gitlab.com/projects/${i}` }));
    setState(links);

    return wrapper.vm.$nextTick(() => {
      expect(findLinks()).toHaveLength(10);
    });
  });
});
@@ -24,6 +24,7 @@ describe GitlabSchema.types['AlertManagementAlert'] do
      details
      created_at
      updated_at
      assignees
    ]

    expect(described_class).to have_graphql_fields(*expected_fields)
@@ -6,7 +6,8 @@ describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
  let_it_be(:project) { create(:project, :empty_repo) }
  let_it_be(:mr1) { create(:merge_request, target_project: project, source_project: project, allow_broken: true, created_at: 3.months.ago) }
  let_it_be(:mr2) { create(:merge_request, target_project: project, source_project: project, allow_broken: true, created_at: 1.month.ago) }
  let(:params) { {} }
  let_it_be(:user) { create(:user) }
  let(:params) { { current_user: user } }
  let(:records) do
    stage = build(:cycle_analytics_project_stage, {
      start_event_identifier: :merge_request_created,

@@ -17,6 +18,7 @@ describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
  end

  before do
    project.add_maintainer(user)
    mr1.metrics.update!(merged_at: 1.month.ago)
    mr2.metrics.update!(merged_at: Time.now)
  end
@@ -23,7 +23,7 @@ describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
  describe '#serialized_records' do
    shared_context 'when records are loaded by maintainer' do
      before do
        project.add_user(user, Gitlab::Access::MAINTAINER)
        project.add_user(user, Gitlab::Access::DEVELOPER)
      end

      it 'returns all records' do

@@ -103,6 +103,8 @@ describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
          latest_build_finished_at: 7.days.ago,
          pipeline: ci_build2.pipeline
        })

        project.add_user(user, Gitlab::Access::MAINTAINER)
      end

      context 'returns build records' do
@@ -7,7 +7,7 @@ describe Gitlab::Badge::Coverage::Report do
  let(:job_name) { nil }

  let(:badge) do
    described_class.new(project, 'master', job_name)
    described_class.new(project, 'master', opts: { job: job_name })
  end

  describe '#entity' do
@@ -3,13 +3,33 @@

require 'spec_helper'

describe Gitlab::Badge::Coverage::Template do
  let(:badge) { double(entity: 'coverage', status: 90.00) }
  let(:badge) { double(entity: 'coverage', status: 90.00, customization: {}) }
  let(:template) { described_class.new(badge) }

  describe '#key_text' do
    it 'is always says coverage' do
    it 'says coverage by default' do
      expect(template.key_text).to eq 'coverage'
    end

    context 'when custom key_text is defined' do
      before do
        allow(badge).to receive(:customization).and_return({ key_text: "custom text" })
      end

      it 'returns custom value' do
        expect(template.key_text).to eq "custom text"
      end

      context 'when its size is larger than the max allowed value' do
        before do
          allow(badge).to receive(:customization).and_return({ key_text: 't' * 129 })
        end

        it 'returns default value' do
          expect(template.key_text).to eq 'coverage'
        end
      end
    end
  end

  describe '#value_text' do

@@ -41,9 +61,29 @@ describe Gitlab::Badge::Coverage::Template do
  end

  describe '#key_width' do
    it 'has a fixed key width' do
    it 'is fixed by default' do
      expect(template.key_width).to eq 62
    end

    context 'when custom key_width is defined' do
      before do
        allow(badge).to receive(:customization).and_return({ key_width: 101 })
      end

      it 'returns custom value' do
        expect(template.key_width).to eq 101
      end

      context 'when it is larger than the max allowed value' do
        before do
          allow(badge).to receive(:customization).and_return({ key_width: 129 })
        end

        it 'returns default value' do
          expect(template.key_width).to eq 62
        end
      end
    end
  end

  describe '#value_width' do
@@ -3,13 +3,33 @@

require 'spec_helper'

describe Gitlab::Badge::Pipeline::Template do
  let(:badge) { double(entity: 'pipeline', status: 'success') }
  let(:badge) { double(entity: 'pipeline', status: 'success', customization: {}) }
  let(:template) { described_class.new(badge) }

  describe '#key_text' do
    it 'is always says pipeline' do
    it 'says pipeline by default' do
      expect(template.key_text).to eq 'pipeline'
    end

    context 'when custom key_text is defined' do
      before do
        allow(badge).to receive(:customization).and_return({ key_text: 'custom text' })
      end

      it 'returns custom value' do
        expect(template.key_text).to eq 'custom text'
      end

      context 'when its size is larger than the max allowed value' do
        before do
          allow(badge).to receive(:customization).and_return({ key_text: 't' * 129 })
        end

        it 'returns default value' do
          expect(template.key_text).to eq 'pipeline'
        end
      end
    end
  end

  describe '#value_text' do

@@ -18,6 +38,32 @@ describe Gitlab::Badge::Pipeline::Template do
    end
  end

  describe '#key_width' do
    it 'is fixed by default' do
      expect(template.key_width).to eq 62
    end

    context 'when custom key_width is defined' do
      before do
        allow(badge).to receive(:customization).and_return({ key_width: 101 })
      end

      it 'returns custom value' do
        expect(template.key_width).to eq 101
      end

      context 'when it is larger than the max allowed value' do
        before do
          allow(badge).to receive(:customization).and_return({ key_width: 129 })
        end

        it 'returns default value' do
          expect(template.key_width).to eq 62
        end
      end
    end
  end

  describe 'widths and text anchors' do
    it 'has fixed width and text anchors' do
      expect(template.width).to eq 116
@@ -2,17 +2,19 @@

require 'spec_helper'

describe Gitlab::Database::PartitioningMigrationHelpers do
describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers do
  let(:model) do
    ActiveRecord::Migration.new.extend(described_class)
  end
  let_it_be(:connection) { ActiveRecord::Base.connection }
  let(:referenced_table) { :issues }
  let(:function_name) { model.fk_function_name(referenced_table) }
  let(:trigger_name) { model.fk_trigger_name(referenced_table) }
  let(:function_name) { '_test_partitioned_foreign_keys_function' }
  let(:trigger_name) { '_test_partitioned_foreign_keys_trigger' }

  before do
    allow(model).to receive(:puts)
    allow(model).to receive(:fk_function_name).and_return(function_name)
    allow(model).to receive(:fk_trigger_name).and_return(trigger_name)
  end

  describe 'adding a foreign key' do
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers do
|
||||
include PartitioningHelpers
|
||||
|
||||
let(:model) do
|
||||
ActiveRecord::Migration.new.extend(described_class)
|
||||
end
|
||||
|
||||
let_it_be(:connection) { ActiveRecord::Base.connection }
|
||||
let(:template_table) { :audit_events }
|
||||
let(:partitioned_table) { '_test_migration_partitioned_table' }
|
||||
let(:partition_column) { 'created_at' }
|
||||
let(:min_date) { Date.new(2019, 12) }
|
||||
let(:max_date) { Date.new(2020, 3) }
|
||||
|
||||
before do
|
||||
allow(model).to receive(:puts)
|
||||
allow(model).to receive(:partitioned_table_name).and_return(partitioned_table)
|
||||
end
|
||||
|
||||
describe '#partition_table_by_date' do
|
||||
let(:old_primary_key) { 'id' }
|
||||
let(:new_primary_key) { [old_primary_key, partition_column] }
|
||||
|
||||
context 'when the the max_date is less than the min_date' do
|
||||
let(:max_date) { Time.utc(2019, 6) }
|
||||
|
||||
it 'raises an error' do
|
||||
expect do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
end.to raise_error(/max_date #{max_date} must be greater than min_date #{min_date}/)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the max_date is equal to the min_date' do
|
||||
let(:max_date) { min_date }
|
||||
|
||||
it 'raises an error' do
|
||||
expect do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
end.to raise_error(/max_date #{max_date} must be greater than min_date #{min_date}/)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the given table does not have a primary key' do
|
||||
let(:template_table) { :_partitioning_migration_helper_test_table }
|
||||
let(:partition_column) { :some_field }
|
||||
|
||||
it 'raises an error' do
|
||||
model.create_table template_table, id: false do |t|
|
||||
t.integer :id
|
||||
t.datetime partition_column
|
||||
end
|
||||
|
||||
expect do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
end.to raise_error(/primary key not defined for #{template_table}/)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when an invalid partition column is given' do
|
||||
let(:partition_column) { :_this_is_not_real }
|
||||
|
||||
it 'raises an error' do
|
||||
expect do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
end.to raise_error(/partition column #{partition_column} does not exist/)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a valid source table and partition column is given' do
|
||||
it 'creates a table partitioned by the proper column' do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
|
||||
expect(connection.table_exists?(partitioned_table)).to be(true)
|
||||
expect(connection.primary_key(partitioned_table)).to eq(new_primary_key)
|
||||
|
||||
expect_table_partitioned_by(partitioned_table, [partition_column])
|
||||
end
|
||||
|
||||
it 'removes the default from the primary key column' do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
|
||||
pk_column = connection.columns(partitioned_table).find { |c| c.name == old_primary_key }
|
||||
|
||||
expect(pk_column.default_function).to be_nil
|
||||
end
|
||||
|
||||
it 'creates the partitioned table with the same non-key columns' do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
|
||||
copied_columns = filter_columns_by_name(connection.columns(partitioned_table), new_primary_key)
|
||||
original_columns = filter_columns_by_name(connection.columns(template_table), new_primary_key)
|
||||
|
||||
expect(copied_columns).to match_array(original_columns)
|
||||
end
|
||||
|
||||
it 'creates a partition spanning over each month in the range given' do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
|
||||
expect_range_partition_of("#{partitioned_table}_000000", partitioned_table, 'MINVALUE', "'2019-12-01 00:00:00'")
|
||||
expect_range_partition_of("#{partitioned_table}_201912", partitioned_table, "'2019-12-01 00:00:00'", "'2020-01-01 00:00:00'")
|
||||
expect_range_partition_of("#{partitioned_table}_202001", partitioned_table, "'2020-01-01 00:00:00'", "'2020-02-01 00:00:00'")
|
||||
expect_range_partition_of("#{partitioned_table}_202002", partitioned_table, "'2020-02-01 00:00:00'", "'2020-03-01 00:00:00'")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#drop_partitioned_table_for' do
|
||||
let(:expected_tables) do
|
||||
%w[000000 201912 202001 202002].map { |suffix| "#{partitioned_table}_#{suffix}" }.unshift(partitioned_table)
|
||||
end
|
||||
|
||||
it 'drops the partitioned copy and all partitions' do
|
||||
model.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
|
||||
|
||||
expected_tables.each do |table|
|
||||
expect(connection.table_exists?(table)).to be(true)
|
||||
end
|
||||
|
||||
model.drop_partitioned_table_for template_table
|
||||
|
||||
expected_tables.each do |table|
|
||||
expect(connection.table_exists?(table)).to be(false)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def filter_columns_by_name(columns, names)
|
||||
columns.reject { |c| names.include?(c.name) }
|
||||
end
|
||||
end
|
||||
|
|
@@ -495,6 +495,7 @@ project:
- repository_storage_moves
- freeze_periods
- webex_teams_service
- build_report_results
award_emoji:
- awardable
- user
@@ -0,0 +1,21 @@
# frozen_string_literal: true

require 'spec_helper'

describe AlertManagement::AlertAssignee do
  describe 'associations' do
    it { is_expected.to belong_to(:alert) }
    it { is_expected.to belong_to(:assignee) }
  end

  describe 'validations' do
    let(:alert) { create(:alert_management_alert) }
    let(:user) { create(:user) }

    subject { alert.alert_assignees.build(assignee: user) }

    it { is_expected.to validate_presence_of(:alert) }
    it { is_expected.to validate_presence_of(:assignee) }
    it { is_expected.to validate_uniqueness_of(:assignee).scoped_to(:alert_id) }
  end
end
@@ -6,6 +6,7 @@ describe AlertManagement::Alert do
  describe 'associations' do
    it { is_expected.to belong_to(:project) }
    it { is_expected.to belong_to(:issue) }
    it { is_expected.to have_many(:assignees).through(:alert_assignees) }
  end

  describe 'validations' do
@@ -0,0 +1,32 @@
# frozen_string_literal: true

require 'spec_helper'

describe Ci::BuildReportResult do
  let(:build_report_result) { build(:ci_build_report_result, :with_junit_success) }

  describe 'associations' do
    it { is_expected.to belong_to(:build) }
    it { is_expected.to belong_to(:project) }
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of(:project) }
    it { is_expected.to validate_presence_of(:build) }

    context 'when attributes are valid' do
      it 'returns no errors' do
        expect(build_report_result).to be_valid
      end
    end

    context 'when data is invalid' do
      it 'returns errors' do
        build_report_result.data = { invalid: 'data' }

        expect(build_report_result).to be_invalid
        expect(build_report_result.errors.full_messages).to eq(["Data must be a valid json schema"])
      end
    end
  end
end
@@ -24,6 +24,7 @@ describe Ci::Build do
  it { is_expected.to have_many(:needs) }
  it { is_expected.to have_many(:sourced_pipelines) }
  it { is_expected.to have_many(:job_variables) }
  it { is_expected.to have_many(:report_results) }

  it { is_expected.to have_one(:deployment) }
  it { is_expected.to have_one(:runner_session) }
@@ -79,6 +79,7 @@ describe Project do
  it { is_expected.to have_many(:ci_refs) }
  it { is_expected.to have_many(:builds) }
  it { is_expected.to have_many(:build_trace_section_names) }
  it { is_expected.to have_many(:build_report_results) }
  it { is_expected.to have_many(:runner_projects) }
  it { is_expected.to have_many(:runners) }
  it { is_expected.to have_many(:variables) }
@@ -75,6 +75,8 @@ describe 'getting Alert Management Alerts' do
      'updatedAt' => triggered_alert.updated_at.strftime('%Y-%m-%dT%H:%M:%SZ')
    )

    expect(first_alert['assignees'].first).to include('username' => triggered_alert.assignees.first.username)

    expect(second_alert).to include(
      'iid' => resolved_alert.iid.to_s,
      'issueIid' => nil,
@@ -5,61 +5,15 @@ require 'spec_helper'

describe API::ProjectRepositoryStorageMoves do
  include AccessMatchersForRequest

  let(:user) { create(:admin) }
  let!(:storage_move) { create(:project_repository_storage_move, :scheduled) }
  let_it_be(:user) { create(:admin) }
  let_it_be(:project) { create(:project) }
  let_it_be(:storage_move) { create(:project_repository_storage_move, :scheduled, project: project) }

  describe 'GET /project_repository_storage_moves' do
    def get_project_repository_storage_moves
      get api('/project_repository_storage_moves', user)
    end

    it 'returns project repository storage moves' do
      get_project_repository_storage_moves

      expect(response).to have_gitlab_http_status(:ok)
      expect(response).to include_pagination_headers
      expect(response).to match_response_schema('public_api/v4/project_repository_storage_moves')
      expect(json_response.size).to eq(1)
      expect(json_response.first['id']).to eq(storage_move.id)
      expect(json_response.first['state']).to eq(storage_move.human_state_name)
    end

    it 'avoids N+1 queries', :request_store do
      # prevent `let` from polluting the control
      get_project_repository_storage_moves

      control = ActiveRecord::QueryRecorder.new { get_project_repository_storage_moves }

      create(:project_repository_storage_move, :scheduled)

      expect { get_project_repository_storage_moves }.not_to exceed_query_limit(control)
    end

    it 'returns the most recently created first' do
      storage_move_oldest = create(:project_repository_storage_move, :scheduled, created_at: 2.days.ago)
      storage_move_middle = create(:project_repository_storage_move, :scheduled, created_at: 1.day.ago)

      get api('/project_repository_storage_moves', user)

      json_ids = json_response.map { |storage_move| storage_move['id'] }
      expect(json_ids).to eq([
        storage_move.id,
        storage_move_middle.id,
        storage_move_oldest.id
      ])
    end

    describe 'permissions' do
      it { expect { get_project_repository_storage_moves }.to be_allowed_for(:admin) }
      it { expect { get_project_repository_storage_moves }.to be_denied_for(:user) }
    end
  end

  describe 'GET /project_repository_storage_moves/:id' do
  shared_examples 'get single project repository storage move' do
    let(:project_repository_storage_move_id) { storage_move.id }

    def get_project_repository_storage_move
      get api("/project_repository_storage_moves/#{project_repository_storage_move_id}", user)
      get api(url, user)
    end

    it 'returns a project repository storage move' do

@@ -86,4 +40,75 @@ describe API::ProjectRepositoryStorageMoves do
      it { expect { get_project_repository_storage_move }.to be_denied_for(:user) }
    end
  end

  shared_examples 'get project repository storage move list' do
    def get_project_repository_storage_moves
      get api(url, user)
    end

    it 'returns project repository storage moves' do
      get_project_repository_storage_moves

      expect(response).to have_gitlab_http_status(:ok)
      expect(response).to include_pagination_headers
      expect(response).to match_response_schema('public_api/v4/project_repository_storage_moves')
      expect(json_response.size).to eq(1)
      expect(json_response.first['id']).to eq(storage_move.id)
      expect(json_response.first['state']).to eq(storage_move.human_state_name)
    end

    it 'avoids N+1 queries', :request_store do
      # prevent `let` from polluting the control
      get_project_repository_storage_moves

      control = ActiveRecord::QueryRecorder.new { get_project_repository_storage_moves }

      create(:project_repository_storage_move, :scheduled, project: project)

      expect { get_project_repository_storage_moves }.not_to exceed_query_limit(control)
    end

    it 'returns the most recently created first' do
      storage_move_oldest = create(:project_repository_storage_move, :scheduled, project: project, created_at: 2.days.ago)
      storage_move_middle = create(:project_repository_storage_move, :scheduled, project: project, created_at: 1.day.ago)

      get_project_repository_storage_moves

      json_ids = json_response.map { |storage_move| storage_move['id'] }
      expect(json_ids).to eq([
        storage_move.id,
        storage_move_middle.id,
        storage_move_oldest.id
      ])
    end

    describe 'permissions' do
      it { expect { get_project_repository_storage_moves }.to be_allowed_for(:admin) }
      it { expect { get_project_repository_storage_moves }.to be_denied_for(:user) }
    end
  end

  describe 'GET /project_repository_storage_moves' do
    it_behaves_like 'get project repository storage move list' do
      let(:url) { '/project_repository_storage_moves' }
    end
  end

  describe 'GET /project_repository_storage_moves/:repository_storage_move_id' do
    it_behaves_like 'get single project repository storage move' do
      let(:url) { "/project_repository_storage_moves/#{project_repository_storage_move_id}" }
    end
  end

  describe 'GET /projects/:id/repository_storage_moves' do
    it_behaves_like 'get project repository storage move list' do
      let(:url) { "/projects/#{project.id}/repository_storage_moves" }
    end
  end

  describe 'GET /projects/:id/repository_storage_moves/:repository_storage_move_id' do
    it_behaves_like 'get single project repository storage move' do
      let(:url) { "/projects/#{project.id}/repository_storage_moves/#{project_repository_storage_move_id}" }
    end
  end
end
@@ -49,7 +49,7 @@ describe Ci::RetryBuildService do
      metadata runner_session trace_chunks upstream_pipeline_id
      artifacts_file artifacts_metadata artifacts_size commands
      resource resource_group_id processed security_scans author
      pipeline_id].freeze
      pipeline_id report_results].freeze

  shared_examples 'build duplication' do
    let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
@@ -0,0 +1,77 @@
# frozen_string_literal: true

require 'spec_helper'

describe JiraImport::UsersImporter do
  include JiraServiceHelper

  let_it_be(:user) { create(:user) }
  let_it_be(:project, reload: true) { create(:project) }
  let_it_be(:start_at) { 7 }

  let(:importer) { described_class.new(user, project, start_at) }

  subject { importer.execute }

  describe '#execute' do
    before do
      stub_jira_service_test
      project.add_maintainer(user)
    end

    context 'when Jira import is not configured properly' do
      it 'raises an error' do
        expect { subject }.to raise_error(Projects::ImportService::Error)
      end
    end

    context 'when Jira import is configured correctly' do
      let_it_be(:jira_service) { create(:jira_service, project: project, active: true) }
      let(:client) { double }

      before do
        expect(importer).to receive(:client).and_return(client)
      end

      context 'when the Jira client raises an error' do
        it 'returns an error response' do
          expect(client).to receive(:get).and_raise(Timeout::Error)

          expect(subject.error?).to be_truthy
          expect(subject.message).to include('There was an error when communicating to Jira')
        end
      end

      context 'when the Jira client returns a result' do
        before do
          allow(client).to receive(:get).with('/rest/api/2/users?maxResults=50&startAt=7')
            .and_return(jira_users)
        end

        context 'when the Jira client returns an empty array' do
          let(:jira_users) { [] }

          it 'returns a nil payload' do
            expect(subject.success?).to be_truthy
            expect(subject.payload).to be_nil
          end
        end

        context 'when the Jira client returns results' do
          let(:jira_users) { [{ 'name' => 'user1' }, { 'name' => 'user2' }] }
          let(:mapped_users) { [{ jira_display_name: 'user1', gitlab_id: 5 }] }

          before do
            expect(JiraImport::UsersMapper).to receive(:new).with(project, jira_users)
              .and_return(double(execute: mapped_users))
          end

          it 'returns the mapped users' do
            expect(subject.success?).to be_truthy
            expect(subject.payload).to eq(mapped_users)
          end
        end
      end
    end
  end
end
@@ -0,0 +1,43 @@
# frozen_string_literal: true

require 'spec_helper'

describe JiraImport::UsersMapper do
  let_it_be(:project) { create(:project) }

  subject { described_class.new(project, jira_users).execute }

  describe '#execute' do
    context 'when jira_users is nil' do
      let(:jira_users) { nil }

      it 'returns an empty array' do
        expect(subject).to be_empty
      end
    end

    context 'when jira_users is present' do
      let(:jira_users) do
        [
          { 'accountId' => 'abcd', 'displayName' => 'user1' },
          { 'accountId' => 'efg' },
          { 'accountId' => 'hij', 'displayName' => 'user3', 'emailAddress' => 'user3@example.com' }
        ]
      end

      # TODO: for now we only build an array in the proper format;
      # the actual mapping is tracked in https://gitlab.com/gitlab-org/gitlab/-/issues/219023
      let(:mapped_users) do
        [
          { jira_account_id: 'abcd', jira_display_name: 'user1', jira_email: nil, gitlab_id: nil },
          { jira_account_id: 'efg', jira_display_name: nil, jira_email: nil, gitlab_id: nil },
          { jira_account_id: 'hij', jira_display_name: 'user3', jira_email: 'user3@example.com', gitlab_id: nil }
        ]
      end

      it 'returns users mapped to GitLab' do
        expect(subject).to eq(mapped_users)
      end
    end
  end
end
@@ -0,0 +1,54 @@
# frozen_string_literal: true

module PartitioningHelpers
  def expect_table_partitioned_by(table, columns, part_type: :range)
    columns_with_part_type = columns.map { |c| [part_type.to_s, c] }
    actual_columns = find_partitioned_columns(table)

    expect(columns_with_part_type).to match_array(actual_columns)
  end

  def expect_range_partition_of(partition_name, table_name, min_value, max_value)
    definition = find_partition_definition(partition_name)

    expect(definition).not_to be_nil
    expect(definition['base_table']).to eq(table_name.to_s)
    expect(definition['condition']).to eq("FOR VALUES FROM (#{min_value}) TO (#{max_value})")
  end

  private

  def find_partitioned_columns(table)
    connection.select_rows(<<~SQL)
      select
        case partstrat
          when 'l' then 'list'
          when 'r' then 'range'
          when 'h' then 'hash'
        end as partstrat,
        cols.column_name
      from (
        select partrelid, partstrat, unnest(partattrs) as col_pos
        from pg_partitioned_table
      ) pg_part
      inner join pg_class
        on pg_part.partrelid = pg_class.oid
      inner join information_schema.columns cols
        on cols.table_name = pg_class.relname
        and cols.ordinal_position = pg_part.col_pos
      where pg_class.relname = '#{table}';
    SQL
  end

  def find_partition_definition(partition)
    connection.select_one(<<~SQL)
      select
        parent_class.relname as base_table,
        pg_get_expr(pg_class.relpartbound, inhrelid) as condition
      from pg_class
      inner join pg_inherits i on pg_class.oid = inhrelid
      inner join pg_class parent_class on parent_class.oid = inhparent
      where pg_class.relname = '#{partition}' and pg_class.relispartition;
    SQL
  end
end
@@ -0,0 +1,42 @@
# frozen_string_literal: true

require 'spec_helper'

describe JsonSchemaValidator do
  describe '#validates_each' do
    let(:build_report_result) { build(:ci_build_report_result, :with_junit_success) }

    subject { validator.validate(build_report_result) }

    context 'when filename is set' do
      let(:validator) { described_class.new(attributes: [:data], filename: "build_report_result_data") }

      context 'when data is valid' do
        it 'returns no errors' do
          subject

          expect(build_report_result.errors).to be_empty
        end
      end

      context 'when data is invalid' do
        it 'returns a json schema validation error' do
          build_report_result.data = { invalid: 'data' }

          validator.validate(build_report_result)

          expect(build_report_result.errors.size).to eq(1)
          expect(build_report_result.errors.full_messages).to eq(["Data must be a valid json schema"])
        end
      end
    end

    context 'when filename is not set' do
      let(:validator) { described_class.new(attributes: [:data]) }

      it 'raises an ArgumentError' do
        expect { subject }.to raise_error(ArgumentError)
      end
    end
  end
end
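
For context, a sketch of how this validator would be wired into a model; the exact declaration below is an assumption based on the options exercised in the spec, following the standard Rails EachValidator convention:

# Hypothetical model wiring: the `json_schema` key resolves to
# JsonSchemaValidator, passing the remaining hash as options.
class Ci::BuildReportResult < ApplicationRecord
  validates :data, json_schema: { filename: "build_report_result_data" }
end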