Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-08-07 06:09:47 +00:00
parent 2efabf1a72
commit 79c0f578e4
33 changed files with 421 additions and 73 deletions

View File

@ -39,7 +39,7 @@ module Projects
render json: serialize_as_json(@alert)
else
head :no_content
head :bad_request
end
end
@ -49,7 +49,7 @@ module Projects
render json: serialize_as_json(alert)
else
head :no_content
head :bad_request
end
end
@ -59,14 +59,14 @@ module Projects
head :ok
else
head :no_content
head :bad_request
end
end
private
def alerts_params
params.permit(:operator, :threshold, :environment_id, :prometheus_metric_id)
params.permit(:operator, :threshold, :environment_id, :prometheus_metric_id, :runbook_url)
end
def notify_service
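The permit list above now accepts `runbook_url` alongside the existing alert attributes. A minimal sketch of how the strong-parameters filter treats an incoming payload; the request values and the `admin` key are hypothetical:

```ruby
require 'action_controller'

params = ActionController::Parameters.new(
  operator: '>',
  threshold: '1',
  environment_id: 42,                                     # hypothetical IDs
  prometheus_metric_id: 7,
  runbook_url: 'https://runbooks.example.com/cpu-alert',  # hypothetical URL
  admin: true                                             # not in the permit list
)

filtered = params.permit(:operator, :threshold, :environment_id,
                         :prometheus_metric_id, :runbook_url)
filtered.to_h # => the five permitted keys; :admin is silently dropped
```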

View File

@ -83,6 +83,7 @@ module Ci
has_many :daily_build_group_report_results, class_name: 'Ci::DailyBuildGroupReportResult', foreign_key: :last_pipeline_id
has_many :latest_builds_report_results, through: :latest_builds, source: :report_results
has_many :pipeline_artifacts, class_name: 'Ci::PipelineArtifact', inverse_of: :pipeline, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
accepts_nested_attributes_for :variables, reject_if: :persisted?

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true
# This class is used to persist a generated report consumable by the GitLab frontend in a pipeline context.
module Ci
class PipelineArtifact < ApplicationRecord
extend Gitlab::Ci::Model
FILE_STORE_SUPPORTED = [
ObjectStorage::Store::LOCAL,
ObjectStorage::Store::REMOTE
].freeze
FILE_SIZE_LIMIT = 10.megabytes.freeze
belongs_to :project, class_name: "Project", inverse_of: :pipeline_artifacts
belongs_to :pipeline, class_name: "Ci::Pipeline", inverse_of: :pipeline_artifacts
validates :pipeline, :project, :file_format, presence: true
validates :file_store, presence: true, inclusion: { in: FILE_STORE_SUPPORTED }
validates :size, presence: true, numericality: { less_than_or_equal_to: FILE_SIZE_LIMIT }
validates :file_type, presence: true, uniqueness: { scope: [:pipeline_id] }
enum file_type: {
code_coverage: 1
}
enum file_format: {
raw: 1,
zip: 2,
gzip: 3
}, _suffix: true
end
end
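A hedged console sketch of the new model's enums and validations, assuming `pipeline` is any persisted `Ci::Pipeline`:

```ruby
artifact = Ci::PipelineArtifact.new(
  pipeline: pipeline,
  project: pipeline.project,
  file_type: :code_coverage,               # the only file_type defined so far
  file_format: :raw,
  file_store: ObjectStorage::Store::LOCAL,
  size: 5.megabytes
)

artifact.valid?            # => true
artifact.raw_file_format?  # => true, predicate generated by the enum's _suffix
artifact.size = 11.megabytes
artifact.valid?            # => false, size exceeds FILE_SIZE_LIMIT
```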

View File

@ -299,6 +299,7 @@ class Project < ApplicationRecord
has_many :build_trace_chunks, class_name: 'Ci::BuildTraceChunk', through: :builds, source: :trace_chunks
has_many :build_report_results, class_name: 'Ci::BuildReportResult', inverse_of: :project
has_many :job_artifacts, class_name: 'Ci::JobArtifact'
has_many :pipeline_artifacts, class_name: 'Ci::PipelineArtifact', inverse_of: :project
has_many :runner_projects, class_name: 'Ci::RunnerProject', inverse_of: :project
has_many :runners, through: :runner_projects, source: :runner, class_name: 'Ci::Runner'
has_many :variables, class_name: 'Ci::Variable'

View File

@ -22,6 +22,8 @@ class PrometheusAlert < ApplicationRecord
after_destroy :clear_prometheus_adapter_cache!
validates :environment, :project, :prometheus_metric, presence: true
validates :runbook_url, length: { maximum: 255 }, allow_blank: true,
addressable_url: { enforce_sanitization: true, ascii_only: true }
validate :require_valid_environment_project!
validate :require_valid_metric_project!
@ -59,6 +61,9 @@ class PrometheusAlert < ApplicationRecord
"gitlab" => "hook",
"gitlab_alert_id" => prometheus_metric_id,
"gitlab_prometheus_alert_id" => id
},
"annotations" => {
"runbook" => runbook_url
}
}
end
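With the new `annotations` key, the hash this method returns looks roughly like the following; keys outside `labels` and `annotations` are assumptions from the surrounding code, and all values are illustrative:

```ruby
alert.to_param
# => {
#   "alert" => "CPU usage high",          # hypothetical title
#   "expr"  => "avg(cpu_usage) > 1.0",    # hypothetical full query
#   "labels" => {
#     "gitlab" => "hook",
#     "gitlab_alert_id" => 7,
#     "gitlab_prometheus_alert_id" => 42
#   },
#   "annotations" => {
#     "runbook" => "https://runbooks.example.com/cpu-alert"
#   }
# }
```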

View File

@ -7,6 +7,7 @@ class PrometheusAlertEntity < Grape::Entity
expose :title
expose :query
expose :threshold
expose :runbook_url
expose :operator do |prometheus_alert|
prometheus_alert.computed_operator
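Serialized through this entity, an alert now carries its runbook URL alongside the other attributes. An illustrative payload, with hypothetical values:

```ruby
PrometheusAlertEntity.represent(alert).as_json
# => {
#   id: 42,
#   title: "CPU usage high",
#   query: "avg(cpu_usage)",
#   threshold: 1.0,
#   runbook_url: "https://runbooks.example.com/cpu-alert",
#   operator: ">",
#   alert_path: "/group/project/-/prometheus/alerts/42"  # shape assumed
# }
```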

View File

@ -0,0 +1,5 @@
---
title: Add PipelineArtifact data model
merge_request: 37969
author:
type: performance

View File

@ -0,0 +1,5 @@
---
title: Add support for runbook url to PrometheusAlert table
merge_request: 38234
author:
type: added

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
class AddRunbookToPrometheusAlert < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
# limit is added in a follow-up migration, AddTextLimitToRunbookOnPrometheusAlerts
add_column :prometheus_alerts, :runbook_url, :text # rubocop:disable Migration/AddLimitToTextColumns
end
def down
remove_column :prometheus_alerts, :runbook_url
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddTextLimitToRunbookOnPrometheusAlerts < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_text_limit :prometheus_alerts, :runbook_url, 255
end
def down
remove_text_limit :prometheus_alerts, :runbook_url
end
end
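Splitting the column addition and its limit across two migrations follows GitLab's text-limit pattern: the 255-character cap is enforced by a check constraint rather than a `varchar(255)`. A sketch of roughly what `add_text_limit` runs, assuming a NOT VALID/VALIDATE split and reusing the constraint name visible in `structure.sql` below:

```ruby
# Inside a disable_ddl_transaction! migration:
execute <<~SQL
  ALTER TABLE prometheus_alerts
    ADD CONSTRAINT check_cb76d7e629
    CHECK (char_length(runbook_url) <= 255) NOT VALID;
SQL

# Validating separately scans existing rows without holding the
# stronger lock needed to add the constraint itself:
execute <<~SQL
  ALTER TABLE prometheus_alerts VALIDATE CONSTRAINT check_cb76d7e629;
SQL
```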

View File

@ -0,0 +1,32 @@
# frozen_string_literal: true
class CreateCiPipelineArtifact < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
unless table_exists?(:ci_pipeline_artifacts)
create_table :ci_pipeline_artifacts do |t|
t.timestamps_with_timezone
t.bigint :pipeline_id, null: false, index: true
t.bigint :project_id, null: false, index: true
t.integer :size, null: false
t.integer :file_store, null: false, limit: 2
t.integer :file_type, null: false, limit: 2
t.integer :file_format, null: false, limit: 2
t.text :file
t.index [:pipeline_id, :file_type], unique: true
end
end
add_text_limit :ci_pipeline_artifacts, :file, 255
end
def down
drop_table :ci_pipeline_artifacts
end
end
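The `table_exists?` guard makes `up` safe to re-run, and the `limit: 2` integers come out as `smallint` columns. A quick hedged console check after migrating:

```ruby
conn = ActiveRecord::Base.connection
conn.table_exists?(:ci_pipeline_artifacts) # => true

conn.columns(:ci_pipeline_artifacts).map { |c| [c.name, c.sql_type] }
# => includes ["file_store", "smallint"], ["file_type", "smallint"],
#    ["file_format", "smallint"] and ["file", "text"]
```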

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddForeignKeyToPipelineIdOnPipelineArtifact < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_foreign_key :ci_pipeline_artifacts, :ci_pipelines, column: :pipeline_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :ci_pipeline_artifacts, column: :pipeline_id
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddForeignKeyToProjectIdOnPipelineArtifact < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_foreign_key :ci_pipeline_artifacts, :projects, column: :project_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :ci_pipeline_artifacts, column: :project_id
end
end
end
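Both foreign keys are added inside `with_lock_retries`, which keeps retrying when the `ALTER TABLE` cannot acquire its lock quickly, instead of queueing behind long-running transactions. An illustrative-only sketch of that pattern; the real helper tunes its timeouts and sleep intervals differently:

```ruby
def with_lock_retries_sketch(attempts: 10)
  attempts.times do |i|
    begin
      ActiveRecord::Base.transaction do
        # Fail fast rather than waiting indefinitely on a busy table
        ActiveRecord::Base.connection.execute("SET LOCAL lock_timeout = '100ms'")
        return yield
      end
    rescue ActiveRecord::LockWaitTimeout
      raise if i == attempts - 1
      sleep(0.1 * (i + 1)) # back off before the next attempt
    end
  end
end
```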

View File

@ -0,0 +1 @@
34d53fcb98b82f3da7eeacd7bfabfd4118b51c448418f20227f7d5b05a0077dc

View File

@ -0,0 +1 @@
a1a896fc3fe060b34da9cde46ac0ddb8b973d077bcae3bb36677f9d02d2a3509

View File

@ -0,0 +1 @@
983fa89c574e41a7d38eb558779f46d6d42aee5fba92f4185307e1508c401298

View File

@ -0,0 +1 @@
8fdb1e994ca7a28f7e061fb80cf210c482bafbe2bd0dc19c631c8fe9e0e2bbaf

View File

@ -0,0 +1 @@
e992135d6a4d10224b7e3deb304790735b6a35b5fb320670f9a7029e2924efb5

View File

@ -10047,6 +10047,29 @@ CREATE SEQUENCE public.ci_job_variables_id_seq
ALTER SEQUENCE public.ci_job_variables_id_seq OWNED BY public.ci_job_variables.id;
CREATE TABLE public.ci_pipeline_artifacts (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
pipeline_id bigint NOT NULL,
project_id bigint NOT NULL,
size integer NOT NULL,
file_store smallint NOT NULL,
file_type smallint NOT NULL,
file_format smallint NOT NULL,
file text,
CONSTRAINT check_191b5850ec CHECK ((char_length(file) <= 255))
);
CREATE SEQUENCE public.ci_pipeline_artifacts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE public.ci_pipeline_artifacts_id_seq OWNED BY public.ci_pipeline_artifacts.id;
CREATE TABLE public.ci_pipeline_chat_data (
id bigint NOT NULL,
pipeline_id integer NOT NULL,
@ -14637,7 +14660,9 @@ CREATE TABLE public.prometheus_alerts (
operator integer NOT NULL,
environment_id integer NOT NULL,
project_id integer NOT NULL,
prometheus_metric_id integer NOT NULL
prometheus_metric_id integer NOT NULL,
runbook_url text,
CONSTRAINT check_cb76d7e629 CHECK ((char_length(runbook_url) <= 255))
);
CREATE SEQUENCE public.prometheus_alerts_id_seq
@ -16708,6 +16733,8 @@ ALTER TABLE ONLY public.ci_job_artifacts ALTER COLUMN id SET DEFAULT nextval('pu
ALTER TABLE ONLY public.ci_job_variables ALTER COLUMN id SET DEFAULT nextval('public.ci_job_variables_id_seq'::regclass);
ALTER TABLE ONLY public.ci_pipeline_artifacts ALTER COLUMN id SET DEFAULT nextval('public.ci_pipeline_artifacts_id_seq'::regclass);
ALTER TABLE ONLY public.ci_pipeline_chat_data ALTER COLUMN id SET DEFAULT nextval('public.ci_pipeline_chat_data_id_seq'::regclass);
ALTER TABLE ONLY public.ci_pipeline_messages ALTER COLUMN id SET DEFAULT nextval('public.ci_pipeline_messages_id_seq'::regclass);
@ -17649,6 +17676,9 @@ ALTER TABLE ONLY public.ci_job_artifacts
ALTER TABLE ONLY public.ci_job_variables
ADD CONSTRAINT ci_job_variables_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.ci_pipeline_artifacts
ADD CONSTRAINT ci_pipeline_artifacts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.ci_pipeline_chat_data
ADD CONSTRAINT ci_pipeline_chat_data_pkey PRIMARY KEY (id);
@ -19109,6 +19139,12 @@ CREATE INDEX index_ci_job_variables_on_job_id ON public.ci_job_variables USING b
CREATE UNIQUE INDEX index_ci_job_variables_on_key_and_job_id ON public.ci_job_variables USING btree (key, job_id);
CREATE INDEX index_ci_pipeline_artifacts_on_pipeline_id ON public.ci_pipeline_artifacts USING btree (pipeline_id);
CREATE UNIQUE INDEX index_ci_pipeline_artifacts_on_pipeline_id_and_file_type ON public.ci_pipeline_artifacts USING btree (pipeline_id, file_type);
CREATE INDEX index_ci_pipeline_artifacts_on_project_id ON public.ci_pipeline_artifacts USING btree (project_id);
CREATE INDEX index_ci_pipeline_chat_data_on_chat_name_id ON public.ci_pipeline_chat_data USING btree (chat_name_id);
CREATE UNIQUE INDEX index_ci_pipeline_chat_data_on_pipeline_id ON public.ci_pipeline_chat_data USING btree (pipeline_id);
@ -22140,6 +22176,9 @@ ALTER TABLE ONLY public.vulnerability_feedback
ALTER TABLE ONLY public.user_custom_attributes
ADD CONSTRAINT fk_rails_47b91868a8 FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.ci_pipeline_artifacts
ADD CONSTRAINT fk_rails_4a70390ca6 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.group_deletion_schedules
ADD CONSTRAINT fk_rails_4b8c694a6c FOREIGN KEY (group_id) REFERENCES public.namespaces(id) ON DELETE CASCADE;
@ -22599,6 +22638,9 @@ ALTER TABLE ONLY public.resource_milestone_events
ALTER TABLE ONLY public.term_agreements
ADD CONSTRAINT fk_rails_a88721bcdf FOREIGN KEY (term_id) REFERENCES public.application_setting_terms(id);
ALTER TABLE ONLY public.ci_pipeline_artifacts
ADD CONSTRAINT fk_rails_a9e811a466 FOREIGN KEY (pipeline_id) REFERENCES public.ci_pipelines(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.merge_request_user_mentions
ADD CONSTRAINT fk_rails_aa1b2961b1 FOREIGN KEY (merge_request_id) REFERENCES public.merge_requests(id) ON DELETE CASCADE;

View File

@ -214,13 +214,13 @@ which will help you get your starting configuration correct.
If you are seeing two pipelines when using `only/except`, please see the caveats
related to using `only/except` above (or consider moving to `rules`).
It is not possible to run a job for branch pipelines first, then only for merge request
pipelines after the merge request is created (skipping the duplicate branch pipeline). See
the [related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/201845) for more details.
### Two pipelines created when pushing an invalid CI configuration file
Pushing to a branch with an invalid CI configuration file can trigger
the creation of two types of failed pipelines. One pipeline is a failed merge request
pipeline, and the other is a failed branch pipeline, but both are caused by the same
invalid configuration.
In rare cases, duplicate pipelines are created.
See [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/201845) for details.

View File

@ -1250,6 +1250,10 @@ are permitted. Allowing only merge request pipelines, or only branch pipelines,
eliminates duplicated pipelines. Alternatively, you can rewrite the rules to be
stricter, or avoid using a final `when` (`always`, `on_success` or `delayed`).
It is not possible to run a job for branch pipelines first, then only for merge request
pipelines after the merge request is created (skipping the duplicate branch pipeline). See
the [related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/201845) for more details.
Also, we don't recommend mixing `only/except` jobs with `rules` jobs in the same pipeline.
It may not cause YAML errors, but debugging the exact execution behavior can be complex
due to the different default behaviors of `only/except` and `rules`.

View File

@ -1,28 +0,0 @@
# Elasticsearch for paid tiers on GitLab.com
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/220246) in GitLab 13.2
> - It's deployed behind a feature flag, disabled by default.
> - It's disabled on GitLab.com.
> - It's not recommended for use in GitLab self-managed instances.
This document describes how to enable Elasticsearch with GitLab for all paid tiers on GitLab.com. Once enabled,
all paid tiers will have access to the [Advanced Global Search feature](../../integration/elasticsearch.md) on GitLab.com.
## Enable or disable Elasticsearch for all paid tiers on GitLab.com
Since we're still in the process of rolling this out and want to control the timing, this is behind a feature flag that defaults to off.
To enable it:
```ruby
# Instance-wide
Feature.enable(:elasticsearch_index_only_paid_groups)
```
To disable it:
```ruby
# Instance-wide
Feature.disable(:elasticsearch_index_only_paid_groups)
```

View File

@ -27,19 +27,22 @@ module QA
end
def replicated?(project_id)
replicas = wait_until_shell_command(%(docker exec gitlab-gitaly-ha bash -c 'gitlab-rake "gitlab:praefect:replicas[#{project_id}]"')) do |line|
QA::Runtime::Logger.debug(line.chomp)
# The output of the rake task looks something like this:
#
# Project name | gitaly1 (primary) | gitaly2 | gitaly3
# ----------------------------------------------------------------------------------------------------------------------------------------------------------------
# gitaly_cluster-3aff1f2bd14e6c98 | 23c4422629234d62b62adacafd0a33a8364e8619 | 23c4422629234d62b62adacafd0a33a8364e8619 | 23c4422629234d62b62adacafd0a33a8364e8619
#
break line if line.start_with?("gitaly_cluster")
end
Support::Retrier.retry_until(raise_on_failure: false) do
replicas = wait_until_shell_command(%(docker exec gitlab-gitaly-ha bash -c 'gitlab-rake "gitlab:praefect:replicas[#{project_id}]"')) do |line|
QA::Runtime::Logger.debug(line.chomp)
# The output of the rake task looks something like this:
#
# Project name | gitaly1 (primary) | gitaly2 | gitaly3
# ----------------------------------------------------------------------------------------------------------------------------------------------------------------
# gitaly_cluster-3aff1f2bd14e6c98 | 23c4422629234d62b62adacafd0a33a8364e8619 | 23c4422629234d62b62adacafd0a33a8364e8619 | 23c4422629234d62b62adacafd0a33a8364e8619
#
break line if line.start_with?('gitaly_cluster')
break nil if line.include?('Something went wrong when getting replicas')
end
# We want to know if the checksums are identical
replicas.split('|').map(&:strip)[1..3].uniq.one?
# We want to know if the checksums are identical
replicas&.split('|')&.map(&:strip)&.slice(1..3)&.uniq&.one?
end
end
def start_primary_node
@ -54,6 +57,14 @@ module QA
stop_node(@praefect)
end
def stop_secondary_node
stop_node(@secondary_node)
end
def start_secondary_node
start_node(@secondary_node)
end
def start_node(name)
shell "docker start #{name}"
end
@ -120,6 +131,18 @@ module QA
result['data']['result'].map { |result| { node: result['metric']['storage'], value: result['value'][1].to_i } }
end
def replication_queue_incomplete_count
result = []
shell sql_to_docker_exec_cmd("select count(*) from replication_queue where state = 'ready' or state = 'in_progress';") do |line|
result << line
end
# The result looks like:
# count
# -----
# 1
result[2].to_i
end
def replication_queue_lock_count
result = []
shell sql_to_docker_exec_cmd("select count(*) from replication_queue_lock where acquired = 't';") do |line|
@ -276,6 +299,22 @@ module QA
end
end
def wait_for_secondary_node_health_check_failure
wait_for_health_check_failure(@secondary_node)
end
def wait_for_health_check_failure(node)
QA::Runtime::Logger.info("Waiting for Praefect to record a health check failure on #{node}")
wait_until_shell_command("docker exec #{@praefect} bash -c 'tail -n 1 /var/log/gitlab/praefect/current'") do |line|
QA::Runtime::Logger.debug(line.chomp)
log = JSON.parse(line)
log['msg'] == 'error when pinging healthcheck' && log['storage'] == node
rescue JSON::ParserError
# Ignore lines that can't be parsed as JSON
end
end
def wait_for_gitaly_check
Support::Waiter.repeat_until(max_attempts: 3) do
storage_ok = false
@ -292,35 +331,33 @@ module QA
end
end
def wait_for_gitlab_shell_check
wait_until_shell_command_matches(
"docker exec #{@gitlab} bash -c 'gitlab-rake gitlab:gitlab_shell:check'",
/Checking GitLab Shell ... Finished/
)
end
# Waits until there is an increase in the number of reads for
# any node compared to the number of reads provided
# any node compared to the number of reads provided. If a node
# has no pre-read data, consider it to have had zero reads.
def wait_for_read_count_change(pre_read_data)
diff_found = false
Support::Waiter.wait_until(sleep_interval: 5) do
query_read_distribution.each_with_index do |data, index|
diff_found = true if data[:value] > pre_read_data[index][:value]
diff_found = true if data[:value] > value_for_node(pre_read_data, data[:node])
end
diff_found
end
end
def value_for_node(data, node)
data.find(-> { { value: 0 } }) { |item| item[:node] == node }[:value]
end
def wait_for_reliable_connection
QA::Runtime::Logger.info('Wait until GitLab and Praefect can communicate reliably')
wait_for_praefect
wait_for_sql_ping
wait_for_storage_nodes
wait_for_gitlab_shell_check
wait_for_gitaly_check
end
def wait_for_replication(project_id)
Support::Waiter.wait_until(sleep_interval: 1) { replicated?(project_id) }
Support::Waiter.wait_until(sleep_interval: 1) { replication_queue_incomplete_count == 0 && replicated?(project_id) }
end
private
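For reference, `wait_for_health_check_failure` above matches Praefect log lines shaped like the one below; fields other than `msg` and `storage` are assumptions:

```ruby
require 'json'

line = '{"level":"warning","msg":"error when pinging healthcheck","storage":"gitaly2"}'
log = JSON.parse(line)
log['msg'] == 'error when pinging healthcheck' && log['storage'] == 'gitaly2'
# => true
```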

View File

@ -29,19 +29,46 @@ module QA
pre_read_data = praefect_manager.query_read_distribution
QA::Runtime::Logger.info('Fetching commits from the repository')
Parallel.each((1..number_of_reads)) do |index|
Resource::Repository::Commit.fabricate_via_api! do |commits|
commits.project = project
end
end
Parallel.each((1..number_of_reads)) { project.commits }
praefect_manager.wait_for_read_count_change(pre_read_data)
aggregate_failures "each gitaly node" do
praefect_manager.query_read_distribution.each_with_index do |data, index|
expect(data[:value])
.to be > pre_read_data[index][:value],
"Read counts did not differ for node #{pre_read_data[index][:node]}"
.to be > praefect_manager.value_for_node(pre_read_data, data[:node]),
"Read counts did not differ for node #{data[:node]}"
end
end
end
context 'when a node is unhealthy' do
before do
praefect_manager.stop_secondary_node
praefect_manager.wait_for_secondary_node_health_check_failure
end
after do
# Leave the cluster in a suitable state for subsequent tests
praefect_manager.start_secondary_node
praefect_manager.wait_for_health_check_all_nodes
praefect_manager.wait_for_reliable_connection
end
it 'does not read from the unhealthy node' do
pre_read_data = praefect_manager.query_read_distribution
QA::Runtime::Logger.info('Fetching commits from the repository')
Parallel.each((1..number_of_reads)) { project.commits }
praefect_manager.wait_for_read_count_change(pre_read_data)
post_read_data = praefect_manager.query_read_distribution
aggregate_failures "each gitaly node" do
expect(praefect_manager.value_for_node(post_read_data, 'gitaly1')).to be > praefect_manager.value_for_node(pre_read_data, 'gitaly1')
expect(praefect_manager.value_for_node(post_read_data, 'gitaly2')).to eq praefect_manager.value_for_node(pre_read_data, 'gitaly2')
expect(praefect_manager.value_for_node(post_read_data, 'gitaly3')).to be > praefect_manager.value_for_node(pre_read_data, 'gitaly3')
end
end
end

View File

@ -111,6 +111,7 @@ RSpec.describe Projects::Prometheus::AlertsController do
describe 'GET #show' do
let(:alert) do
create(:prometheus_alert,
:with_runbook_url,
project: project,
environment: environment,
prometheus_metric: metric)
@ -140,6 +141,7 @@ RSpec.describe Projects::Prometheus::AlertsController do
'query' => alert.query,
'operator' => alert.computed_operator,
'threshold' => alert.threshold,
'runbook_url' => alert.runbook_url,
'alert_path' => alert_path(alert)
}
end
@ -225,7 +227,8 @@ RSpec.describe Projects::Prometheus::AlertsController do
'title' => metric.title,
'query' => metric.query,
'operator' => '>',
'threshold' => 1.0
'threshold' => 1.0,
'runbook_url' => 'https://sample.runbook.com'
}
end
@ -234,6 +237,7 @@ RSpec.describe Projects::Prometheus::AlertsController do
opts,
operator: '>',
threshold: '1',
runbook_url: 'https://sample.runbook.com',
environment_id: environment,
prometheus_metric_id: metric
)
@ -250,14 +254,14 @@ RSpec.describe Projects::Prometheus::AlertsController do
expect(json_response).to include(alert_params)
end
it 'returns no_content for an invalid metric' do
it 'returns bad_request for an invalid metric' do
make_request(prometheus_metric_id: 'invalid')
expect(response).to have_gitlab_http_status(:no_content)
expect(response).to have_gitlab_http_status(:bad_request)
end
it_behaves_like 'unprivileged'
it_behaves_like 'project non-specific environment', :no_content
it_behaves_like 'project non-specific environment', :bad_request
end
describe 'PUT #update' do
@ -304,6 +308,12 @@ RSpec.describe Projects::Prometheus::AlertsController do
expect(json_response).to include(alert_params)
end
it 'returns bad_request for an invalid alert data' do
make_request(runbook_url: 'bad-url')
expect(response).to have_gitlab_http_status(:bad_request)
end
it_behaves_like 'unprivileged'
it_behaves_like 'project non-specific environment', :not_found
it_behaves_like 'project non-specific metric', :not_found

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
FactoryBot.define do
factory :ci_pipeline_artifact, class: 'Ci::PipelineArtifact' do
pipeline factory: :ci_pipeline
project { pipeline.project }
file_type { :code_coverage }
file_format { :raw }
file_store { Ci::PipelineArtifact::FILE_STORE_SUPPORTED.first }
size { 1.megabytes }
end
end
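A hedged sketch of the factory in use within a spec; the enum predicates come from the model's definitions:

```ruby
artifact = create(:ci_pipeline_artifact)

artifact.code_coverage?                       # => true
artifact.raw_file_format?                     # => true
artifact.project == artifact.pipeline.project # => true, per the factory wiring
```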

View File

@ -13,5 +13,9 @@ FactoryBot.define do
prometheus_metric do |alert|
build(:prometheus_metric, project: alert.project)
end
trait :with_runbook_url do
runbook_url { 'https://runbooks.gitlab.com/metric_gt_1' }
end
end
end

View File

@ -230,6 +230,7 @@ ci_pipelines:
- daily_report_results
- latest_builds_report_results
- messages
- pipeline_artifacts
ci_refs:
- project
- ci_pipelines
@ -519,6 +520,7 @@ project:
- vulnerability_statistic
- vulnerability_historical_statistics
- product_analytics_events
- pipeline_artifacts
award_emoji:
- awardable
- user

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::PipelineArtifact, type: :model do
let_it_be(:coverage_report) { create(:ci_pipeline_artifact) }
describe 'associations' do
it { is_expected.to belong_to(:pipeline) }
it { is_expected.to belong_to(:project) }
end
it_behaves_like 'having unique enum values'
describe 'validations' do
it { is_expected.to validate_presence_of(:pipeline) }
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:file_type) }
it { is_expected.to validate_presence_of(:file_format) }
it { is_expected.to validate_presence_of(:size) }
it { is_expected.to validate_uniqueness_of(:file_type).scoped_to([:pipeline_id]).ignoring_case_sensitivity }
context 'when attributes are valid' do
it 'returns no errors' do
expect(coverage_report).to be_valid
end
end
context 'when file_store is invalid' do
it 'returns errors' do
coverage_report.file_store = 0
expect(coverage_report).to be_invalid
expect(coverage_report.errors.full_messages).to eq(["File store is not included in the list"])
end
end
context 'when size is over 10 megabytes' do
it 'returns errors' do
coverage_report.size = 11.megabytes
expect(coverage_report).to be_invalid
end
end
end
end

View File

@ -46,6 +46,7 @@ RSpec.describe Ci::Pipeline, :mailer do
it { is_expected.to respond_to :git_author_email }
it { is_expected.to respond_to :short_sha }
it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
it { is_expected.to have_many(:pipeline_artifacts) }
describe 'associations' do
it 'has a bidirectional relationship with projects' do

View File

@ -122,6 +122,7 @@ RSpec.describe Project do
it { is_expected.to have_many(:reviews).inverse_of(:project) }
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') }
it { is_expected.to have_many(:pipeline_artifacts) }
it_behaves_like 'model with repository' do
let_it_be(:container) { create(:project, :repository, path: 'somewhere') }

View File

@ -74,6 +74,34 @@ RSpec.describe PrometheusAlert do
end
end
describe 'runbook validations' do
it 'disallows invalid urls' do
unsafe_url = %{https://replaceme.com/'><script>alert(document.cookie)</script>}
non_ascii_url = 'http://gitlab.com/user/project1/wiki/something€'
excessively_long_url = 'https://gitla' + 'b' * 1024 + '.com'
is_expected.not_to allow_values(
unsafe_url,
non_ascii_url,
excessively_long_url
).for(:runbook_url)
end
it 'allows valid urls' do
external_url = 'http://runbook.gitlab.com/'
internal_url = 'http://192.168.1.1'
blank_url = ''
nil_url = nil
is_expected.to allow_value(
external_url,
internal_url,
blank_url,
nil_url
).for(:runbook_url)
end
end
describe '#full_query' do
before do
subject.operator = "gt"
@ -91,6 +119,7 @@ RSpec.describe PrometheusAlert do
subject.operator = "gt"
subject.threshold = 1
subject.prometheus_metric = metric
subject.runbook_url = 'runbook'
end
it 'returns the params of the prometheus alert' do
@ -102,7 +131,11 @@ RSpec.describe PrometheusAlert do
"gitlab" => "hook",
"gitlab_alert_id" => metric.id,
"gitlab_prometheus_alert_id" => subject.id
})
},
"annotations" => {
"runbook" => "runbook"
}
)
end
end
end

View File

@ -16,7 +16,7 @@ RSpec.describe PrometheusAlertEntity do
end
it 'exposes prometheus_alert attributes' do
expect(subject).to include(:id, :title, :query, :operator, :threshold)
expect(subject).to include(:id, :title, :query, :operator, :threshold, :runbook_url)
end
it 'exposes alert_path' do