Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 6ac4a6713e
commit 39fa7d1eeb
@@ -328,13 +328,6 @@ RSpec/LeakyConstantDeclaration:
Exclude:
- 'spec/**/*.rb'
- 'qa/spec/**/*.rb'
- 'ee/spec/lib/gitlab/geo/log_cursor/logger_spec.rb'
- 'ee/spec/lib/gitlab/geo/log_helpers_spec.rb'
- 'ee/spec/lib/gitlab/geo/replicator_spec.rb'
- 'ee/spec/mailers/emails/service_desk_spec.rb'
- 'ee/spec/migrations/remove_creations_in_gitlab_subscription_histories_spec.rb'
- 'ee/spec/migrations/set_resolved_state_on_vulnerabilities_spec.rb'
- 'ee/spec/models/repository_spec.rb'
- 'ee/spec/presenters/security/vulnerable_project_presenter_spec.rb'
- 'ee/spec/serializers/vulnerable_project_entity_spec.rb'
- 'ee/spec/services/clusters/applications/check_upgrade_progress_service_spec.rb'

@@ -491,13 +491,6 @@ Style/MultilineIfModifier:
- 'app/services/ci/process_pipeline_service.rb'
- 'lib/api/commit_statuses.rb'

# Offense count: 34
# Cop supports --auto-correct.
# Configuration parameters: Whitelist.
# Whitelist: be, be_a, be_an, be_between, be_falsey, be_kind_of, be_instance_of, be_truthy, be_within, eq, eql, end_with, include, match, raise_error, respond_to, start_with
Style/NestedParenthesizedCalls:
Enabled: false

# Offense count: 25
# Cop supports --auto-correct.
# Configuration parameters: EnforcedStyle, MinBodyLength.
Gemfile

@@ -493,3 +493,6 @@ gem 'erubi', '~> 1.9.0'
# Monkey-patched in `config/initializers/mail_encoding_patch.rb`
# See https://gitlab.com/gitlab-org/gitlab/issues/197386
gem 'mail', '= 2.7.1'

# File encryption
gem 'lockbox', '~> 0.3.3'

@@ -605,6 +605,7 @@ GEM
rb-inotify (~> 0.9, >= 0.9.7)
ruby_dep (~> 1.2)
locale (2.1.2)
lockbox (0.3.3)
lograge (0.10.0)
actionpack (>= 4)
activesupport (>= 4)

@@ -1280,6 +1281,7 @@ DEPENDENCIES
license_finder (~> 5.4)
licensee (~> 8.9)
liquid (~> 4.0)
lockbox (~> 0.3.3)
lograge (~> 0.5)
loofah (~> 2.2)
lru_redux
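The new `lockbox` gem is what the Terraform state uploader further down in this commit relies on for file encryption. A rough, self-contained sketch of the gem's core API (the key is generated on the spot, not a real GitLab secret):

```ruby
require 'lockbox'

key = Lockbox.generate_key          # random 64-character hex key, for illustration only
box = Lockbox.new(key: key)

ciphertext = box.encrypt('{"version": 4}')
box.decrypt(ciphertext)             # => '{"version": 4}'
```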
@ -0,0 +1,97 @@
|
|||
<script>
|
||||
import { GlResizeObserverDirective } from '@gitlab/ui';
|
||||
import { GlBarChart } from '@gitlab/ui/dist/charts';
|
||||
import { getSvgIconPathContent } from '~/lib/utils/icon_utils';
|
||||
import { chartHeight } from '../../constants';
|
||||
import { barChartsDataParser, graphDataValidatorForValues } from '../../utils';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlBarChart,
|
||||
},
|
||||
directives: {
|
||||
GlResizeObserverDirective,
|
||||
},
|
||||
props: {
|
||||
graphData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
validator: graphDataValidatorForValues.bind(null, false),
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
width: 0,
|
||||
height: chartHeight,
|
||||
svgs: {},
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
chartData() {
|
||||
return barChartsDataParser(this.graphData.metrics);
|
||||
},
|
||||
chartOptions() {
|
||||
return {
|
||||
dataZoom: [this.dataZoomConfig],
|
||||
};
|
||||
},
|
||||
xAxisTitle() {
|
||||
const { xLabel = '' } = this.graphData;
|
||||
return xLabel;
|
||||
},
|
||||
yAxisTitle() {
|
||||
const { y_label = '' } = this.graphData;
|
||||
return y_label; // eslint-disable-line babel/camelcase
|
||||
},
|
||||
xAxisType() {
|
||||
const { x_type = 'value' } = this.graphData;
|
||||
return x_type; // eslint-disable-line babel/camelcase
|
||||
},
|
||||
dataZoomConfig() {
|
||||
const handleIcon = this.svgs['scroll-handle'];
|
||||
|
||||
return handleIcon ? { handleIcon } : {};
|
||||
},
|
||||
},
|
||||
created() {
|
||||
this.setSvg('scroll-handle');
|
||||
},
|
||||
methods: {
|
||||
formatLegendLabel(query) {
|
||||
return `${query.label}`;
|
||||
},
|
||||
onResize() {
|
||||
if (!this.$refs.barChart) return;
|
||||
const { width } = this.$refs.barChart.$el.getBoundingClientRect();
|
||||
this.width = width;
|
||||
},
|
||||
setSvg(name) {
|
||||
getSvgIconPathContent(name)
|
||||
.then(path => {
|
||||
if (path) {
|
||||
this.$set(this.svgs, name, `path://${path}`);
|
||||
}
|
||||
})
|
||||
.catch(e => {
|
||||
// eslint-disable-next-line no-console, @gitlab/require-i18n-strings
|
||||
console.error('SVG could not be rendered correctly: ', e);
|
||||
});
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
<div v-gl-resize-observer-directive="onResize">
|
||||
<gl-bar-chart
|
||||
ref="barChart"
|
||||
v-bind="$attrs"
|
||||
:data="chartData"
|
||||
:option="chartOptions"
|
||||
:width="width"
|
||||
:height="height"
|
||||
:x-axis-title="xAxisTitle"
|
||||
:y-axis-title="yAxisTitle"
|
||||
:x-axis-type="xAxisType"
|
||||
/>
|
||||
</div>
|
||||
</template>
|
||||
|
|
@ -18,6 +18,7 @@ import MonitorAnomalyChart from './charts/anomaly.vue';
|
|||
import MonitorSingleStatChart from './charts/single_stat.vue';
|
||||
import MonitorHeatmapChart from './charts/heatmap.vue';
|
||||
import MonitorColumnChart from './charts/column.vue';
|
||||
import MonitorBarChart from './charts/bar.vue';
|
||||
import MonitorStackedColumnChart from './charts/stacked_column.vue';
|
||||
import MonitorEmptyChart from './charts/empty_chart.vue';
|
||||
import TrackEventDirective from '~/vue_shared/directives/track_event';
|
||||
|
|
@ -31,6 +32,7 @@ export default {
|
|||
components: {
|
||||
MonitorSingleStatChart,
|
||||
MonitorColumnChart,
|
||||
MonitorBarChart,
|
||||
MonitorHeatmapChart,
|
||||
MonitorStackedColumnChart,
|
||||
MonitorEmptyChart,
|
||||
|
|
@ -259,6 +261,10 @@ export default {
|
|||
v-else-if="isPanelType('heatmap') && graphDataHasMetrics"
|
||||
:graph-data="graphData"
|
||||
/>
|
||||
<monitor-bar-chart
|
||||
v-else-if="isPanelType('bar') && graphDataHasMetrics"
|
||||
:graph-data="graphData"
|
||||
/>
|
||||
<monitor-column-chart
|
||||
v-else-if="isPanelType('column') && graphDataHasMetrics"
|
||||
:graph-data="graphData"
|
||||
|
|
|
|||
|
|
@ -46,7 +46,6 @@ export const metricStates = {
|
|||
};
|
||||
|
||||
export const sidebarAnimationDuration = 300; // milliseconds.
|
||||
|
||||
export const chartHeight = 300;
|
||||
|
||||
export const graphTypes = {
|
||||
|
|
|
|||
|
|
@ -73,14 +73,21 @@ const mapToMetricsViewModel = (metrics, defaultLabel) =>
|
|||
}));
|
||||
|
||||
/**
|
||||
* Maps an axis view model
|
||||
* Maps X-axis view model
|
||||
*
|
||||
* @param {Object} axis
|
||||
*/
|
||||
const mapXAxisToViewModel = ({ name = '' }) => ({ name });
|
||||
|
||||
/**
|
||||
* Maps Y-axis view model
|
||||
*
|
||||
* Defaults to a 2 digit precision and `number` format. It only allows
|
||||
* formats in the SUPPORTED_FORMATS array.
|
||||
*
|
||||
* @param {Object} axis
|
||||
*/
|
||||
const mapToAxisViewModel = ({ name = '', format = SUPPORTED_FORMATS.number, precision = 2 }) => {
|
||||
const mapYAxisToViewModel = ({ name = '', format = SUPPORTED_FORMATS.number, precision = 2 }) => {
|
||||
return {
|
||||
name,
|
||||
format: SUPPORTED_FORMATS[format] || SUPPORTED_FORMATS.number,
|
||||
|
|
@ -94,15 +101,30 @@ const mapToAxisViewModel = ({ name = '', format = SUPPORTED_FORMATS.number, prec
|
|||
* @param {Object} panel - Metrics panel
|
||||
* @returns {Object}
|
||||
*/
|
||||
const mapToPanelViewModel = ({ title = '', type, y_label, y_axis = {}, metrics = [] }) => {
|
||||
const mapPanelToViewModel = ({
|
||||
title = '',
|
||||
type,
|
||||
x_axis = {},
|
||||
x_label,
|
||||
y_label,
|
||||
y_axis = {},
|
||||
metrics = [],
|
||||
}) => {
|
||||
// Both `x_axis.name` and `x_label` are supported for now
|
||||
// https://gitlab.com/gitlab-org/gitlab/issues/210521
|
||||
const xAxis = mapXAxisToViewModel({ name: x_label, ...x_axis }); // eslint-disable-line babel/camelcase
|
||||
|
||||
// Both `y_axis.name` and `y_label` are supported for now
|
||||
// https://gitlab.com/gitlab-org/gitlab/issues/208385
|
||||
const yAxis = mapToAxisViewModel({ name: y_label, ...y_axis }); // eslint-disable-line babel/camelcase
|
||||
const yAxis = mapYAxisToViewModel({ name: y_label, ...y_axis }); // eslint-disable-line babel/camelcase
|
||||
|
||||
return {
|
||||
title,
|
||||
type,
|
||||
xLabel: xAxis.name,
|
||||
y_label: yAxis.name, // Changing y_label to yLabel is pending https://gitlab.com/gitlab-org/gitlab/issues/207198
|
||||
yAxis,
|
||||
xAxis,
|
||||
metrics: mapToMetricsViewModel(metrics, yAxis.name),
|
||||
};
|
||||
};
|
||||
|
|
@ -117,7 +139,7 @@ const mapToPanelGroupViewModel = ({ group = '', panels = [] }, i) => {
|
|||
return {
|
||||
key: `${slugify(group || 'default')}-${i}`,
|
||||
group,
|
||||
panels: panels.map(mapToPanelViewModel),
|
||||
panels: panels.map(mapPanelToViewModel),
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -132,4 +132,63 @@ export const timeRangeToUrl = (timeRange, url = window.location.href) => {
|
|||
return mergeUrlParams(params, toUrl);
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the metric value from first data point.
|
||||
* Currently only used for bar charts
|
||||
*
|
||||
* @param {Array} values data points
|
||||
* @returns {Number}
|
||||
*/
|
||||
const metricValueMapper = values => values[0]?.[1];
|
||||
|
||||
/**
|
||||
* Get the metric name from metric object
|
||||
* Currently only used for bar charts
|
||||
* e.g. { handler: '/query' }
|
||||
* { method: 'get' }
|
||||
*
|
||||
* @param {Object} metric metric object
|
||||
* @returns {String}
|
||||
*/
|
||||
const metricNameMapper = metric => Object.values(metric)?.[0];
|
||||
|
||||
/**
|
||||
* Parse metric object to extract metric value and name in
|
||||
* [<metric-value>, <metric-name>] format.
|
||||
* Currently only used for bar charts
|
||||
*
|
||||
* @param {Object} param0 metric object
|
||||
* @returns {Array}
|
||||
*/
|
||||
const resultMapper = ({ metric, values = [] }) => [
|
||||
metricValueMapper(values),
|
||||
metricNameMapper(metric),
|
||||
];
|
||||
|
||||
/**
|
||||
* Bar charts graph data parser to massage data from
|
||||
* backend to a format acceptable by bar charts component
|
||||
* in GitLab UI
|
||||
*
|
||||
* e.g.
|
||||
* {
|
||||
* SLO: [
|
||||
* [98, 'api'],
|
||||
* [99, 'web'],
|
||||
* [99, 'database']
|
||||
* ]
|
||||
* }
|
||||
*
|
||||
* @param {Array} data series information
|
||||
* @returns {Object}
|
||||
*/
|
||||
export const barChartsDataParser = (data = []) =>
|
||||
data?.reduce(
|
||||
(acc, { result = [], label }) => ({
|
||||
...acc,
|
||||
[label]: result.map(resultMapper),
|
||||
}),
|
||||
{},
|
||||
);
|
||||
|
||||
export default {};
|
||||
|
|
|
|||
|
|
@ -93,6 +93,11 @@
|
|||
|
||||
.alert-current-setting {
|
||||
max-width: 240px;
|
||||
|
||||
.badge.badge-danger {
|
||||
color: $red-500;
|
||||
background-color: $red-100;
|
||||
}
|
||||
}
|
||||
|
||||
.prometheus-graph-cursor {
|
||||
|
|
|
|||
|
|
@@ -30,8 +30,7 @@ module AuthHelper

def qa_class_for_provider(provider)
{
saml: 'qa-saml-login-button',
github: 'qa-github-login-button'
saml: 'qa-saml-login-button'
}[provider.to_sym]
end
@@ -0,0 +1,26 @@
# frozen_string_literal: true

module Services
module DataFields
extend ActiveSupport::Concern

included do
belongs_to :service

delegate :activated?, to: :service, allow_nil: true

validates :service, presence: true
end

class_methods do
def encryption_options
{
key: Settings.attr_encrypted_db_key_base_32,
encode: true,
mode: :per_attribute_iv,
algorithm: 'aes-256-gcm'
}
end
end
end
end

@@ -44,6 +44,7 @@ module DataFields
included do
has_one :issue_tracker_data, autosave: true
has_one :jira_tracker_data, autosave: true
has_one :open_project_tracker_data, autosave: true

def data_fields
raise NotImplementedError
@@ -1,20 +1,7 @@
# frozen_string_literal: true

class IssueTrackerData < ApplicationRecord
belongs_to :service

delegate :activated?, to: :service, allow_nil: true

validates :service, presence: true

def self.encryption_options
{
key: Settings.attr_encrypted_db_key_base_32,
encode: true,
mode: :per_attribute_iv,
algorithm: 'aes-256-gcm'
}
end
include Services::DataFields

attr_encrypted :project_url, encryption_options
attr_encrypted :issues_url, encryption_options
@@ -1,20 +1,7 @@
# frozen_string_literal: true

class JiraTrackerData < ApplicationRecord
belongs_to :service

delegate :activated?, to: :service, allow_nil: true

validates :service, presence: true

def self.encryption_options
{
key: Settings.attr_encrypted_db_key_base_32,
encode: true,
mode: :per_attribute_iv,
algorithm: 'aes-256-gcm'
}
end
include Services::DataFields

attr_encrypted :url, encryption_options
attr_encrypted :api_url, encryption_options
@@ -0,0 +1,18 @@
# frozen_string_literal: true

class OpenProjectService < IssueTrackerService
validates :url, public_url: true, presence: true, if: :activated?
validates :api_url, public_url: true, allow_blank: true, if: :activated?
validates :token, presence: true, if: :activated?
validates :project_identifier_code, presence: true, if: :activated?

data_field :url, :api_url, :token, :closed_status_id, :project_identifier_code

def data_fields
open_project_tracker_data || self.build_open_project_tracker_data
end

def self.to_param
'open_project'
end
end
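A small, hypothetical console sketch of the two helpers defined above; it assumes an in-memory service object and mirrors the code rather than documenting extra behavior:

```ruby
service = OpenProjectService.new

OpenProjectService.to_param   # => "open_project"
service.data_fields           # builds an OpenProjectTrackerData record (kept on the
                              # has_one association) when none exists yet
```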
@@ -0,0 +1,16 @@
# frozen_string_literal: true

class OpenProjectTrackerData < ApplicationRecord
include Services::DataFields

# On a fresh OpenProject installation, the default closed status id is "13" (based on the current version, v8).
DEFAULT_CLOSED_STATUS_ID = "13"

attr_encrypted :url, encryption_options
attr_encrypted :api_url, encryption_options
attr_encrypted :token, encryption_options

def closed_status_id
super || DEFAULT_CLOSED_STATUS_ID
end
end
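A quick illustration (invented values) of the fallback added in `closed_status_id`:

```ruby
OpenProjectTrackerData.new.closed_status_id                          # => "13" (DEFAULT_CLOSED_STATUS_ID)
OpenProjectTrackerData.new(closed_status_id: '15').closed_status_id  # => "15"
```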
@@ -0,0 +1,7 @@
# frozen_string_literal: true

module Terraform
def self.table_name_prefix
'terraform_'
end
end
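The prefix means models in this namespace map to `terraform_*` tables, for example:

```ruby
Terraform.table_name_prefix  # => "terraform_"
Terraform::State.table_name  # => "terraform_states"
```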
@@ -0,0 +1,23 @@
# frozen_string_literal: true

module Terraform
class State < ApplicationRecord
belongs_to :project

validates :project_id, presence: true

after_save :update_file_store, if: :saved_change_to_file?

mount_uploader :file, StateUploader

def update_file_store
# The file.object_store is set during `uploader.store!`
# which happens after object is inserted/updated
self.update_column(:file_store, file.object_store)
end

def file_store
super || StateUploader.default_store
end
end
end
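A hedged sketch of how this model might be exercised from a Rails console; the project id and file path are invented, and real call sites inside GitLab may differ:

```ruby
project = Project.find(42)                     # assumes an existing project
state   = Terraform::State.new(project: project)

# `mount_uploader` provides the `file=` writer; saving triggers the
# `update_file_store` callback because the file column changed.
state.file = File.open('/tmp/example.tfstate')
state.save!

state.file_store # => StateUploader.default_store unless object storage moved it
```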
@@ -6,11 +6,8 @@ module MergeRequests
event_service.open_mr(merge_request, current_user)
notification_service.new_merge_request(merge_request, current_user)

# https://gitlab.com/gitlab-org/gitlab/issues/208813
if ::Feature.enabled?(:create_merge_request_pipelines_in_sidekiq, project)
create_pipeline_for(merge_request, current_user)
merge_request.update_head_pipeline
end
create_pipeline_for(merge_request, current_user)
merge_request.update_head_pipeline

merge_request.diffs(include_stats: false).write_cache
merge_request.create_cross_references!(current_user)

@@ -21,12 +21,6 @@ module MergeRequests
todo_service.new_merge_request(issuable, current_user)
issuable.cache_merge_request_closes_issues!(current_user)

# https://gitlab.com/gitlab-org/gitlab/issues/208813
unless ::Feature.enabled?(:create_merge_request_pipelines_in_sidekiq, project)
create_pipeline_for(issuable, current_user)
issuable.update_head_pipeline
end

Gitlab::UsageDataCounters::MergeRequestCounter.count(:create)
link_lfs_objects(issuable)
@@ -0,0 +1,44 @@
# frozen_string_literal: true

module Terraform
class StateUploader < GitlabUploader
include ObjectStorage::Concern

storage_options Gitlab.config.terraform_state

delegate :project_id, to: :model

# Use Lockbox to encrypt/decrypt the stored file (registers CarrierWave callbacks)
encrypt(key: :key)

def filename
"#{model.id}.tfstate"
end

def store_dir
project_id.to_s
end

def key
OpenSSL::HMAC.digest('SHA256', Gitlab::Application.secrets.db_key_base, project_id.to_s)
end

class << self
def direct_upload_enabled?
false
end

def background_upload_enabled?
false
end

def proxy_download_enabled?
true
end

def default_store
object_store_enabled? ? ObjectStorage::Store::REMOTE : ObjectStorage::Store::LOCAL
end
end
end
end
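To make the layout and key derivation above concrete, a small illustration with invented values (`db_key_base` stands in for `Gitlab::Application.secrets.db_key_base`):

```ruby
require 'openssl'

db_key_base = 'example-db-key-base'
project_id  = 42

# Per-project Lockbox key: a 32-byte HMAC-SHA256 of the project id.
key = OpenSSL::HMAC.digest('SHA256', db_key_base, project_id.to_s)
key.bytesize # => 32

# With store_dir == project_id and filename == "<id>.tfstate",
# the state with id 7 for this project is stored under:
File.join(project_id.to_s, '7.tfstate') # => "42/7.tfstate"
```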
@ -5,4 +5,4 @@
|
|||
-# per_page: number of items to fetch per page
|
||||
-# remote: data-remote
|
||||
%li.page-item.disabled.d-none.d-md-block
|
||||
= link_to raw(t 'views.pagination.truncate'), '#', class: 'page-link'
|
||||
= link_to raw(t('views.pagination.truncate')), '#', class: 'page-link'
|
||||
|
|
|
|||
|
|
@ -129,7 +129,7 @@
|
|||
|
||||
= render_if_exists 'shared/issuable/sidebar_weight', issuable_sidebar: issuable_sidebar
|
||||
|
||||
- if Feature.enabled?(:save_issuable_health_status, @project.group) && issuable_sidebar[:type] == "issue"
|
||||
- if issuable_sidebar.dig(:features_available, :health_status)
|
||||
.js-sidebar-status-entry-point
|
||||
|
||||
- if issuable_sidebar.has_key?(:confidential)
|
||||
|
|
|
|||
|
|
@ -1,2 +1,2 @@
|
|||
document.write('#{escape_javascript(stylesheet_link_tag "#{stylesheet_url 'snippets'}")}');
|
||||
document.write('#{escape_javascript(render 'shared/snippets/embed')}');
|
||||
document.write('#{escape_javascript(stylesheet_link_tag("#{stylesheet_url 'snippets'}"))}');
|
||||
document.write('#{escape_javascript(render('shared/snippets/embed'))}');
|
||||
|
|
|
|||
|
|
@ -133,7 +133,7 @@ class IrkerWorker # rubocop:disable Scalability/IdempotentWorker
|
|||
commit = commit_from_id project, hook_attrs['id']
|
||||
sha = colorize_sha Commit.truncate_sha(hook_attrs['id'])
|
||||
author = hook_attrs['author']['name']
|
||||
files = colorize_nb_files(files_count commit)
|
||||
files = colorize_nb_files(files_count(commit))
|
||||
title = commit.title
|
||||
|
||||
sendtoirker "#{repo_name}/#{branch} #{sha} #{author} (#{files}): #{title}"
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,5 @@
---
title: Create model to store Terraform state files
merge_request: 26619
author:
type: added

@@ -0,0 +1,5 @@
---
title: Optimize ci builds non distinct counters in usage data
merge_request: 28027
author:
type: performance

@@ -0,0 +1,5 @@
---
title: Add bar chart support to monitoring dashboard
merge_request: 27155
author:
type: added

@@ -0,0 +1,5 @@
---
title: Add possibility to configure additional rails hosts with env variable
merge_request: 28133
author:
type: other

@@ -0,0 +1,5 @@
---
title: Create merge request pipelines in background jobs
merge_request: 28024
author:
type: performance
@ -320,6 +320,24 @@ production: &base
|
|||
# aws_signature_version: 4 # For creation of signed URLs. Set to 2 if provider does not support v4.
|
||||
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
|
||||
|
||||
## Terraform state
|
||||
terraform_state:
|
||||
enabled: true
|
||||
# The location where Terraform state files are stored (default: shared/terraform_state).
|
||||
# storage_path: shared/terraform_state
|
||||
object_store:
|
||||
enabled: false
|
||||
remote_directory: terraform_state # The bucket name
|
||||
connection:
|
||||
provider: AWS
|
||||
aws_access_key_id: AWS_ACCESS_KEY_ID
|
||||
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
|
||||
region: us-east-1
|
||||
# host: 'localhost' # default: s3.amazonaws.com
|
||||
# endpoint: 'http://127.0.0.1:9000' # default: nil
|
||||
# aws_signature_version: 4 # For creation of signed URLs. Set to 2 if provider does not support v4.
|
||||
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
|
||||
|
||||
## GitLab Pages
|
||||
pages:
|
||||
enabled: false
|
||||
|
|
@ -1193,6 +1211,19 @@ test:
|
|||
aws_access_key_id: AWS_ACCESS_KEY_ID
|
||||
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
|
||||
region: us-east-1
|
||||
|
||||
terraform_state:
|
||||
enabled: true
|
||||
storage_path: tmp/tests/terraform_state
|
||||
object_store:
|
||||
enabled: false
|
||||
remote_directory: terraform_state
|
||||
connection:
|
||||
provider: AWS # Only AWS supported at the moment
|
||||
aws_access_key_id: AWS_ACCESS_KEY_ID
|
||||
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
|
||||
region: us-east-1
|
||||
|
||||
gitlab:
|
||||
host: localhost
|
||||
port: 80
|
||||
|
|
|
|||
|
|
@@ -369,6 +369,14 @@ Gitlab.ee do
Settings.dependency_proxy['enabled'] = false unless Gitlab::Runtime.puma?
end

#
# Terraform state
#
Settings['terraform_state'] ||= Settingslogic.new({})
Settings.terraform_state['enabled'] = true if Settings.terraform_state['enabled'].nil?
Settings.terraform_state['storage_path'] = Settings.absolute(Settings.terraform_state['storage_path'] || File.join(Settings.shared['path'], "terraform_state"))
Settings.terraform_state['object_store'] = ObjectStoreSettings.parse(Settings.terraform_state['object_store'])

#
# Mattermost
#

@@ -4,4 +4,9 @@

if Rails.env.development?
Rails.application.config.hosts += [Gitlab.config.gitlab.host, 'unix']

if ENV['RAILS_HOSTS']
additional_hosts = ENV['RAILS_HOSTS'].split(',').select(&:presence)
Rails.application.config.hosts += additional_hosts
end
end
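A short, hypothetical example of how the new `RAILS_HOSTS` handling behaves in development (`presence` comes from ActiveSupport, so blank entries are dropped):

```ruby
ENV['RAILS_HOSTS'] = 'gitlab.example.com,,gitlab.local'

additional_hosts = ENV['RAILS_HOSTS'].split(',').select(&:presence)
# => ["gitlab.example.com", "gitlab.local"]

Rails.application.config.hosts += additional_hosts
```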
@@ -0,0 +1,14 @@
# frozen_string_literal: true

class CreateTerraformStates < ActiveRecord::Migration[6.0]
DOWNTIME = false

def change
create_table :terraform_states do |t|
t.references :project, index: true, foreign_key: { on_delete: :cascade }, null: false
t.timestamps_with_timezone null: false
t.integer :file_store, limit: 2
t.string :file, limit: 255
end
end
end
@ -0,0 +1,21 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddIndexOnNameTypeEqCiBuildToCiBuilds < ActiveRecord::Migration[6.0]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
DOWNTIME = false
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'index_ci_builds_on_name_and_security_type_eq_ci_build'
|
||||
|
||||
def up
|
||||
add_concurrent_index :ci_builds, [:name, :id],
|
||||
name: INDEX_NAME,
|
||||
where: "((name)::text = ANY (ARRAY[('container_scanning'::character varying)::text, ('dast'::character varying)::text, ('dependency_scanning'::character varying)::text, ('license_management'::character varying)::text, ('sast'::character varying)::text, ('license_scanning'::character varying)::text])) AND ((type)::text = 'Ci::Build'::text)"
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index_by_name :ci_builds, INDEX_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -5976,6 +5976,24 @@ CREATE SEQUENCE public.term_agreements_id_seq
|
|||
|
||||
ALTER SEQUENCE public.term_agreements_id_seq OWNED BY public.term_agreements.id;
|
||||
|
||||
CREATE TABLE public.terraform_states (
|
||||
id bigint NOT NULL,
|
||||
project_id bigint NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL,
|
||||
file_store smallint,
|
||||
file character varying(255)
|
||||
);
|
||||
|
||||
CREATE SEQUENCE public.terraform_states_id_seq
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
ALTER SEQUENCE public.terraform_states_id_seq OWNED BY public.terraform_states.id;
|
||||
|
||||
CREATE TABLE public.timelogs (
|
||||
id integer NOT NULL,
|
||||
time_spent integer NOT NULL,
|
||||
|
|
@ -7329,6 +7347,8 @@ ALTER TABLE ONLY public.tags ALTER COLUMN id SET DEFAULT nextval('public.tags_id
|
|||
|
||||
ALTER TABLE ONLY public.term_agreements ALTER COLUMN id SET DEFAULT nextval('public.term_agreements_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY public.terraform_states ALTER COLUMN id SET DEFAULT nextval('public.terraform_states_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY public.timelogs ALTER COLUMN id SET DEFAULT nextval('public.timelogs_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY public.todos ALTER COLUMN id SET DEFAULT nextval('public.todos_id_seq'::regclass);
|
||||
|
|
@ -8230,6 +8250,9 @@ ALTER TABLE ONLY public.tags
|
|||
ALTER TABLE ONLY public.term_agreements
|
||||
ADD CONSTRAINT term_agreements_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY public.terraform_states
|
||||
ADD CONSTRAINT terraform_states_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY public.timelogs
|
||||
ADD CONSTRAINT timelogs_pkey PRIMARY KEY (id);
|
||||
|
||||
|
|
@ -8604,6 +8627,8 @@ CREATE INDEX index_ci_builds_on_commit_id_and_type_and_name_and_ref ON public.ci
|
|||
|
||||
CREATE INDEX index_ci_builds_on_commit_id_and_type_and_ref ON public.ci_builds USING btree (commit_id, type, ref);
|
||||
|
||||
CREATE INDEX index_ci_builds_on_name_and_security_type_eq_ci_build ON public.ci_builds USING btree (name, id) WHERE (((name)::text = ANY (ARRAY[('container_scanning'::character varying)::text, ('dast'::character varying)::text, ('dependency_scanning'::character varying)::text, ('license_management'::character varying)::text, ('sast'::character varying)::text, ('license_scanning'::character varying)::text])) AND ((type)::text = 'Ci::Build'::text));
|
||||
|
||||
CREATE INDEX index_ci_builds_on_name_for_security_reports_values ON public.ci_builds USING btree (name) WHERE ((name)::text = ANY (ARRAY[('container_scanning'::character varying)::text, ('dast'::character varying)::text, ('dependency_scanning'::character varying)::text, ('license_management'::character varying)::text, ('sast'::character varying)::text, ('license_scanning'::character varying)::text]));
|
||||
|
||||
CREATE INDEX index_ci_builds_on_project_id_and_id ON public.ci_builds USING btree (project_id, id);
|
||||
|
|
@ -9972,6 +9997,8 @@ CREATE INDEX index_term_agreements_on_term_id ON public.term_agreements USING bt
|
|||
|
||||
CREATE INDEX index_term_agreements_on_user_id ON public.term_agreements USING btree (user_id);
|
||||
|
||||
CREATE INDEX index_terraform_states_on_project_id ON public.terraform_states USING btree (project_id);
|
||||
|
||||
CREATE INDEX index_timelogs_on_issue_id ON public.timelogs USING btree (issue_id);
|
||||
|
||||
CREATE INDEX index_timelogs_on_merge_request_id ON public.timelogs USING btree (merge_request_id);
|
||||
|
|
@ -11277,6 +11304,9 @@ ALTER TABLE ONLY public.pages_domain_acme_orders
|
|||
ALTER TABLE ONLY public.ci_subscriptions_projects
|
||||
ADD CONSTRAINT fk_rails_7871f9a97b FOREIGN KEY (upstream_project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY public.terraform_states
|
||||
ADD CONSTRAINT fk_rails_78f54ca485 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY public.software_license_policies
|
||||
ADD CONSTRAINT fk_rails_7a7a2a92de FOREIGN KEY (software_license_id) REFERENCES public.software_licenses(id) ON DELETE CASCADE;
|
||||
|
||||
|
|
@ -12749,6 +12779,7 @@ COPY "schema_migrations" (version) FROM STDIN;
|
|||
20200304211738
|
||||
20200305121159
|
||||
20200305151736
|
||||
20200305200641
|
||||
20200306095654
|
||||
20200306160521
|
||||
20200306170211
|
||||
|
|
@ -12807,5 +12838,6 @@ COPY "schema_migrations" (version) FROM STDIN;
|
|||
20200323122201
|
||||
20200323134519
|
||||
20200324115359
|
||||
20200325160952
|
||||
\.
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,135 @@
|
|||
# Terraform state administration (alpha)
|
||||
|
||||
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/2673) in GitLab 12.10.
|
||||
|
||||
GitLab can be used as a backend for [Terraform](../user/infrastructure/index.md) state
|
||||
files. The files are encrypted before being stored. This feature is enabled by default.
|
||||
|
||||
The storage location of these files defaults to:
|
||||
|
||||
- `/var/opt/gitlab/gitlab-rails/shared/terraform_state` for Omnibus GitLab installations.
|
||||
- `/home/git/gitlab/shared/terraform_state` for source installations.
|
||||
|
||||
These locations can be configured using the options described below.
|
||||
|
||||
## Using local storage

NOTE: **Note:**
This is the default configuration.

To change the location where Terraform state files are stored locally, follow the steps below.
|
||||
|
||||
**In Omnibus installations:**
|
||||
|
||||
1. To change the storage path for example to `/mnt/storage/terraform_state`, edit
|
||||
`/etc/gitlab/gitlab.rb` and add the following line:
|
||||
|
||||
```ruby
|
||||
gitlab_rails['terraform_state_enabled'] = true
|
||||
gitlab_rails['terraform_state_storage_path'] = "/mnt/storage/terraform_state"
|
||||
```
|
||||
|
||||
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
|
||||
|
||||
**In installations from source:**
|
||||
|
||||
1. To change the storage path for example to `/mnt/storage/terraform_state`, edit
|
||||
`/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
|
||||
|
||||
```yaml
|
||||
terraform_state:
|
||||
enabled: true
|
||||
storage_path: /mnt/storage/terraform_state
|
||||
```
|
||||
|
||||
1. Save the file and [restart GitLab][] for the changes to take effect.
|
||||
|
||||
## Using object storage **(CORE ONLY)**

Instead of storing Terraform state files on disk, we recommend using an S3-compatible
object store. This configuration relies on valid credentials already being configured.
|
||||
|
||||
### Object storage settings
|
||||
|
||||
The following settings are:
|
||||
|
||||
- Nested under `terraform_state:` and then `object_store:` on source installations.
|
||||
- Prefixed by `terraform_state_object_store_` on Omnibus GitLab installations.
|
||||
|
||||
| Setting | Description | Default |
|
||||
|---------|-------------|---------|
|
||||
| `enabled` | Enable/disable object storage | `true` |
|
||||
| `remote_directory` | The bucket name where Terraform state files will be stored | |
|
||||
| `connection` | Various connection options described below | |
|
||||
|
||||
### S3-compatible connection settings
|
||||
|
||||
The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
|
||||
|
||||
| Setting | Description | Default |
|
||||
|---------|-------------|---------|
|
||||
| `provider` | Always `AWS` for compatible hosts | `AWS` |
|
||||
| `aws_access_key_id` | Credentials for AWS or compatible provider | |
|
||||
| `aws_secret_access_key` | Credentials for AWS or compatible provider | |
|
||||
| `aws_signature_version` | AWS signature version to use. 2 or 4 are valid options. Digital Ocean Spaces and other providers may need 2. | 4 |
|
||||
| `enable_signature_v4_streaming` | Set to true to enable HTTP chunked transfers with [AWS v4 signatures](https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html). Oracle Cloud S3 needs this to be false | `true` |
|
||||
| `region` | AWS region | us-east-1 |
|
||||
| `host` | S3-compatible host when not using AWS. For example, `localhost` or `storage.example.com` | `s3.amazonaws.com` |
|
||||
| `endpoint` | Can be used when configuring an S3-compatible service such as [MinIO](https://min.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
|
||||
| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | `false` |
|
||||
| `use_iam_profile` | For AWS S3, set to true to use an IAM profile instead of access keys | `false` |
|
||||
|
||||
**In Omnibus installations:**
|
||||
|
||||
1. Edit `/etc/gitlab/gitlab.rb` and add the following lines; replacing with
|
||||
the values you want:
|
||||
|
||||
```ruby
|
||||
gitlab_rails['terraform_state_enabled'] = true
|
||||
gitlab_rails['terraform_state_object_store_enabled'] = true
|
||||
gitlab_rails['terraform_state_object_store_remote_directory'] = "terraform_state"
|
||||
gitlab_rails['terraform_state_object_store_connection'] = {
|
||||
'provider' => 'AWS',
|
||||
'region' => 'eu-central-1',
|
||||
'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
|
||||
'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
|
||||
}
|
||||
```
|
||||
|
||||
NOTE: **Note:**
|
||||
If you are using AWS IAM profiles, be sure to omit the AWS access key and secret access key/value pairs.
|
||||
|
||||
```ruby
|
||||
gitlab_rails['terraform_state_object_store_connection'] = {
|
||||
'provider' => 'AWS',
|
||||
'region' => 'eu-central-1',
|
||||
'use_iam_profile' => true
|
||||
}
|
||||
```
|
||||
|
||||
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
|
||||
|
||||
**In installations from source:**
|
||||
|
||||
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
|
||||
lines:
|
||||
|
||||
```yaml
|
||||
terraform_state:
|
||||
enabled: true
|
||||
object_store:
|
||||
enabled: true
|
||||
remote_directory: "terraform_state" # The bucket name
|
||||
connection:
|
||||
provider: AWS # Only AWS supported at the moment
|
||||
aws_access_key_id: AWS_ACCESS_KEY_ID
|
||||
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
|
||||
region: eu-central-1
|
||||
```
|
||||
|
||||
1. Save the file and [restart GitLab][] for the changes to take effect.
|
||||
|
||||
[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
|
||||
[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
|
||||
|
|
@ -6228,6 +6228,11 @@ type Project {
|
|||
state: RequirementState
|
||||
): Requirement
|
||||
|
||||
"""
|
||||
Number of requirements for the project by their state
|
||||
"""
|
||||
requirementStatesCount: RequirementStatesCount
|
||||
|
||||
"""
|
||||
Find requirements. Available only when feature flag `requirements_management` is enabled.
|
||||
"""
|
||||
|
|
@ -7030,6 +7035,21 @@ enum RequirementState {
|
|||
OPENED
|
||||
}
|
||||
|
||||
"""
|
||||
Counts of requirements by their state.
|
||||
"""
|
||||
type RequirementStatesCount {
|
||||
"""
|
||||
Number of archived requirements
|
||||
"""
|
||||
archived: Int
|
||||
|
||||
"""
|
||||
Number of opened requirements
|
||||
"""
|
||||
opened: Int
|
||||
}
|
||||
|
||||
type RootStorageStatistics {
|
||||
"""
|
||||
The CI artifacts size in bytes
|
||||
|
|
|
|||
|
|
@ -18629,6 +18629,20 @@
|
|||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "requirementStatesCount",
|
||||
"description": "Number of requirements for the project by their state",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "OBJECT",
|
||||
"name": "RequirementStatesCount",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "requirements",
|
||||
"description": "Find requirements. Available only when feature flag `requirements_management` is enabled.",
|
||||
|
|
@ -21114,6 +21128,47 @@
|
|||
],
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "RequirementStatesCount",
|
||||
"description": "Counts of requirements by their state.",
|
||||
"fields": [
|
||||
{
|
||||
"name": "archived",
|
||||
"description": "Number of archived requirements",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "SCALAR",
|
||||
"name": "Int",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "opened",
|
||||
"description": "Number of opened requirements",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "SCALAR",
|
||||
"name": "Int",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [
|
||||
|
||||
],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "RootStorageStatistics",
|
||||
|
|
|
|||
|
|
@ -910,6 +910,7 @@ Information about pagination in a connection.
|
|||
| `repository` | Repository | Git repository of the project |
|
||||
| `requestAccessEnabled` | Boolean | Indicates if users can request member access to the project |
|
||||
| `requirement` | Requirement | Find a single requirement. Available only when feature flag `requirements_management` is enabled. |
|
||||
| `requirementStatesCount` | RequirementStatesCount | Number of requirements for the project by their state |
|
||||
| `sentryDetailedError` | SentryDetailedError | Detailed version of a Sentry error on the project |
|
||||
| `sentryErrors` | SentryErrorCollection | Paginated collection of Sentry errors on the project |
|
||||
| `serviceDeskAddress` | String | E-mail address of the service desk. |
|
||||
|
|
@ -1032,6 +1033,15 @@ Check permissions for the current user on a requirement
|
|||
| `readRequirement` | Boolean! | Indicates the user can perform `read_requirement` on this resource |
|
||||
| `updateRequirement` | Boolean! | Indicates the user can perform `update_requirement` on this resource |
|
||||
|
||||
## RequirementStatesCount
|
||||
|
||||
Counts of requirements by their state.
|
||||
|
||||
| Name | Type | Description |
|
||||
| --- | ---- | ---------- |
|
||||
| `archived` | Int | Number of archived requirements |
|
||||
| `opened` | Int | Number of opened requirements |
|
||||
|
||||
## RootStorageStatistics
|
||||
|
||||
| Name | Type | Description |
|
||||
|
|
|
|||
|
|
@@ -14,5 +14,10 @@ module Gitlab
author ||= "Anonymous"
"*Created by: #{author}*\n\n"
end

def assignee_line(assignee)
assignee ||= "Anonymous"
"*Assigned to: #{assignee}*\n\n"
end
end
end
@@ -3,12 +3,50 @@
module Gitlab
module JiraImport
class IssueSerializer
attr_reader :jira_issue, :project, :params, :formatter

def initialize(project, jira_issue, params = {})
@jira_issue = jira_issue
@project = project
@params = params
@formatter = Gitlab::ImportFormatter.new
end

def execute
# this is going to be implemented in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27201
{}
{
iid: params[:iid],
project_id: project.id,
description: description,
title: title,
state_id: map_status(jira_issue.status.statusCategory),
updated_at: jira_issue.updated,
created_at: jira_issue.created,
author_id: project.creator_id # TODO: map actual author: https://gitlab.com/gitlab-org/gitlab/-/issues/210580
}
end

private

def title
"[#{jira_issue.key}] #{jira_issue.summary}"
end

def description
body = []
body << formatter.author_line(jira_issue.reporter.displayName)
body << formatter.assignee_line(jira_issue.assignee.displayName) if jira_issue.assignee
body << jira_issue.description

body.join
end

def map_status(jira_status_category)
case jira_status_category["key"].downcase
when 'done'
Issuable::STATE_ID_MAP[:closed]
else
Issuable::STATE_ID_MAP[:opened]
end
end
end
end
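A hedged sketch of driving the serializer; `jira_issue` stands in for a `JIRA::Resource::Issue` and all values are invented:

```ruby
serializer = Gitlab::JiraImport::IssueSerializer.new(project, jira_issue, iid: 101)
attributes = serializer.execute

attributes[:title]    # => "[PROJ-7] Fix the flux capacitor"
attributes[:state_id] # => Issuable::STATE_ID_MAP[:closed] when the Jira status category key is "done",
                      #    otherwise Issuable::STATE_ID_MAP[:opened]
```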
@ -14,11 +14,11 @@ module Gitlab::UsageDataCounters
|
|||
end
|
||||
|
||||
def count(event)
|
||||
increment(redis_key event)
|
||||
increment(redis_key(event))
|
||||
end
|
||||
|
||||
def read(event)
|
||||
total_count(redis_key event)
|
||||
total_count(redis_key(event))
|
||||
end
|
||||
|
||||
def totals
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ module SystemCheck
|
|||
sudo_gitlab("\"#{Gitlab.config.git.bin_path}\" config --global core.autocrlf \"#{OPTIONS['core.autocrlf']}\"")
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'GitLab'
|
||||
see_installation_guide_section('GitLab')
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ module SystemCheck
|
|||
'Update config/gitlab.yml to match your setup'
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'GitLab'
|
||||
see_installation_guide_section('GitLab')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ module SystemCheck
|
|||
'Update config/gitlab.yml to match your setup'
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'GitLab'
|
||||
see_installation_guide_section('GitLab')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ module SystemCheck
|
|||
'Install the init script'
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'Install Init Script'
|
||||
see_installation_guide_section('Install Init Script')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ module SystemCheck
|
|||
'Re-download the init script'
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'Install Init Script'
|
||||
see_installation_guide_section('Install Init Script')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ module SystemCheck
|
|||
"sudo chmod -R u+rwX #{log_path}"
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'GitLab'
|
||||
see_installation_guide_section('GitLab')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ module SystemCheck
|
|||
"sudo chmod -R u+rwX #{tmp_path}"
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'GitLab'
|
||||
see_installation_guide_section('GitLab')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ module SystemCheck
|
|||
"sudo -u #{gitlab_user} mkdir #{Rails.root}/public/uploads"
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'GitLab'
|
||||
see_installation_guide_section('GitLab')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ module SystemCheck
|
|||
"sudo chmod 700 #{uploads_fullpath}"
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'GitLab'
|
||||
see_installation_guide_section('GitLab')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ module SystemCheck
|
|||
"sudo find #{uploads_fullpath} -type d -not -path #{uploads_fullpath} -exec chmod 0700 {} \\;"
|
||||
)
|
||||
for_more_information(
|
||||
see_installation_guide_section 'GitLab'
|
||||
see_installation_guide_section('GitLab')
|
||||
)
|
||||
fix_and_rerun
|
||||
end
|
||||
|
|
|
|||
|
|
@ -15900,6 +15900,9 @@ msgstr ""
|
|||
msgid "PrometheusAlerts|%{count} alerts applied"
|
||||
msgstr ""
|
||||
|
||||
msgid "PrometheusAlerts|%{firingCount} firing"
|
||||
msgstr ""
|
||||
|
||||
msgid "PrometheusAlerts|Add alert"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -15918,6 +15921,12 @@ msgstr ""
|
|||
msgid "PrometheusAlerts|Error saving alert"
|
||||
msgstr ""
|
||||
|
||||
msgid "PrometheusAlerts|Firing: %{alerts}"
|
||||
msgstr ""
|
||||
|
||||
msgid "PrometheusAlerts|Firing: %{alert}"
|
||||
msgstr ""
|
||||
|
||||
msgid "PrometheusAlerts|Operator"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
12
qa/qa.rb
12
qa/qa.rb
|
|
@ -134,7 +134,6 @@ module QA
|
|||
autoload :LDAPNoServer, 'qa/scenario/test/integration/ldap_no_server'
|
||||
autoload :LDAPTLS, 'qa/scenario/test/integration/ldap_tls'
|
||||
autoload :InstanceSAML, 'qa/scenario/test/integration/instance_saml'
|
||||
autoload :OAuth, 'qa/scenario/test/integration/oauth'
|
||||
autoload :Kubernetes, 'qa/scenario/test/integration/kubernetes'
|
||||
autoload :Mattermost, 'qa/scenario/test/integration/mattermost'
|
||||
autoload :ObjectStorage, 'qa/scenario/test/integration/object_storage'
|
||||
|
|
@ -483,17 +482,6 @@ module QA
|
|||
autoload :ConfigureJob, 'qa/vendor/jenkins/page/configure_job'
|
||||
end
|
||||
end
|
||||
|
||||
module Github
|
||||
module Page
|
||||
autoload :Base, 'qa/vendor/github/page/base'
|
||||
autoload :Login, 'qa/vendor/github/page/login'
|
||||
end
|
||||
end
|
||||
|
||||
module OnePassword
|
||||
autoload :CLI, 'qa/vendor/one_password/cli'
|
||||
end
|
||||
end
|
||||
|
||||
# Classes that provide support to other parts of the framework.
|
||||
|
|
|
|||
|
|
@ -35,7 +35,6 @@ module QA
|
|||
|
||||
view 'app/helpers/auth_helper.rb' do
|
||||
element :saml_login_button
|
||||
element :github_login_button
|
||||
end
|
||||
|
||||
view 'app/views/layouts/devise.html.haml' do
|
||||
|
|
@ -139,11 +138,6 @@ module QA
|
|||
click_element :standard_tab
|
||||
end
|
||||
|
||||
def sign_in_with_github
|
||||
set_initial_password_if_present
|
||||
click_element :github_login_button
|
||||
end
|
||||
|
||||
def sign_in_with_saml
|
||||
set_initial_password_if_present
|
||||
click_element :saml_login_button
|
||||
|
|
|
|||
|
|
@ -1,13 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
module Scenario
|
||||
module Test
|
||||
module Integration
|
||||
class OAuth < Test::Instance::All
|
||||
tags :oauth
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
# This test is skipped instead of quarantine because continuously running
|
||||
# this test may cause the user to hit GitHub's rate limits thus blocking the user.
|
||||
# Related issue: https://gitlab.com/gitlab-org/gitlab/issues/196517
|
||||
context 'Manage', :orchestrated, :oauth, :skip do
|
||||
describe 'OAuth login' do
|
||||
it 'User logs in to GitLab with GitHub OAuth' do
|
||||
Runtime::Browser.visit(:gitlab, Page::Main::Login)
|
||||
|
||||
Page::Main::Login.perform(&:sign_in_with_github)
|
||||
Vendor::Github::Page::Login.perform(&:login)
|
||||
|
||||
expect(page).to have_content('Welcome to GitLab')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module Github
|
||||
module Page
|
||||
class Base
|
||||
include Capybara::DSL
|
||||
include Scenario::Actable
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'capybara/dsl'
|
||||
require 'benchmark'
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module Github
|
||||
module Page
|
||||
class Login < Page::Base
|
||||
def login
|
||||
fill_in 'login', with: QA::Runtime::Env.github_username
|
||||
fill_in 'password', with: QA::Runtime::Env.github_password
|
||||
click_on 'Sign in'
|
||||
|
||||
Support::Retrier.retry_until(raise_on_failure: true, sleep_interval: 35) do
|
||||
fresh_otp = nil
|
||||
|
||||
time = Benchmark.realtime do
|
||||
fresh_otp = OnePassword::CLI.instance.fresh_otp
|
||||
end
|
||||
|
||||
QA::Runtime::Logger.info("Returned fresh_otp: #{fresh_otp} in #{time} seconds")
|
||||
|
||||
fill_in 'otp', with: fresh_otp
|
||||
|
||||
click_on 'Verify'
|
||||
|
||||
!has_text?('Two-factor authentication failed', wait: 1.0)
|
||||
end
|
||||
|
||||
click_on 'Authorize gitlab-qa' if has_button?('Authorize gitlab-qa')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,63 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'benchmark'
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module OnePassword
|
||||
class CLI
|
||||
include Singleton
|
||||
|
||||
def initialize
|
||||
@email = QA::Runtime::Env.gitlab_qa_1p_email
|
||||
@password = QA::Runtime::Env.gitlab_qa_1p_password
|
||||
@secret = QA::Runtime::Env.gitlab_qa_1p_secret
|
||||
@github_uuid = QA::Runtime::Env.gitlab_qa_1p_github_uuid
|
||||
end
|
||||
|
||||
def fresh_otp
|
||||
otps = []
|
||||
|
||||
# Fetches a fresh OTP and returns it only after op provides the same OTP twice
|
||||
# An OTP is valid for 30 seconds so 70 attempts with 0.5 interval would ensure we complete 1 cycle
|
||||
Support::Retrier.retry_until(max_attempts: 70, sleep_interval: 0.5) do
|
||||
otps << fetch_otp
|
||||
otps.size >= 3 && otps[-1] == otps[-2] && otps[-1] != otps[-3]
|
||||
end
|
||||
|
||||
otps.last
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def fetch_otp
|
||||
result = nil
|
||||
|
||||
time = Benchmark.realtime do
|
||||
result = `#{op_path} get totp #{@github_uuid} --session=#{session_token}`.to_i
|
||||
end
|
||||
|
||||
QA::Runtime::Logger.info("Fetched OTP: #{result} in: #{time} seconds")
|
||||
|
||||
result
|
||||
end
|
||||
|
||||
# OP session tokens are valid for 30 minutes. We are caching the session token here and this is fine currently
|
||||
# as we just have one test that is not expected to go over 30 minutes.
|
||||
# But note that if we add more tests that use this class, we might need to add a mechanism to invalidate
|
||||
# the cache after 30 minutes or if the session_token is rejected by op CLI.
|
||||
def session_token
|
||||
@session_token ||= `echo '#{@password}' | #{op_path} signin gitlab.1password.com #{@email} #{@secret} --output=raw --shorthand=gitlab_qa`
|
||||
end
|
||||
|
||||
def op_path
|
||||
File.expand_path(File.join(%W[qa vendor one_password #{os} op]))
|
||||
end
|
||||
|
||||
def os
|
||||
RUBY_PLATFORM.include?("darwin") ? "darwin" : "linux"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
Binary file not shown.
Binary file not shown.
|
|
@ -23,12 +23,12 @@ describe QA::Page::Base do
|
|||
|
||||
it 'makes it possible to define page views' do
|
||||
expect(subject.views.size).to eq 2
|
||||
expect(subject.views).to all(be_an_instance_of QA::Page::View)
|
||||
expect(subject.views).to all(be_an_instance_of(QA::Page::View))
|
||||
end
|
||||
|
||||
it 'populates views objects with data about elements' do
|
||||
expect(subject.elements.size).to eq 3
|
||||
expect(subject.elements).to all(be_an_instance_of QA::Page::Element)
|
||||
expect(subject.elements).to all(be_an_instance_of(QA::Page::Element))
|
||||
expect(subject.elements.map(&:name))
|
||||
.to eq [:something, :something_else, :another_element]
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,9 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
describe QA::Scenario::Test::Integration::OAuth do
|
||||
describe '#perform' do
|
||||
it_behaves_like 'a QA scenario class' do
|
||||
let(:tags) { [:oauth] }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -126,6 +126,26 @@ FactoryBot.define do
|
|||
end
|
||||
end
|
||||
|
||||
factory :open_project_service do
|
||||
project
|
||||
active { true }
|
||||
|
||||
transient do
|
||||
url { 'http://openproject.example.com' }
|
||||
api_url { 'http://openproject.example.com/issues/:id' }
|
||||
token { 'supersecret' }
|
||||
closed_status_id { '15' }
|
||||
project_identifier_code { 'PRJ-1' }
|
||||
end
|
||||
|
||||
after(:build) do |service, evaluator|
|
||||
create(:open_project_tracker_data, service: service,
|
||||
url: evaluator.url, api_url: evaluator.api_url, token: evaluator.token,
|
||||
closed_status_id: evaluator.closed_status_id, project_identifier_code: evaluator.project_identifier_code
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
trait :jira_cloud_service do
|
||||
url { 'https://mysite.atlassian.net' }
|
||||
username { 'jira_user' }
|
||||
|
|
|
|||
|
|
@ -9,4 +9,12 @@ FactoryBot.define do
|
|||
factory :issue_tracker_data do
|
||||
service
|
||||
end
|
||||
|
||||
factory :open_project_tracker_data do
|
||||
service
|
||||
url { 'http://openproject.example.com'}
|
||||
token { 'supersecret' }
|
||||
project_identifier_code { 'PRJ-1' }
|
||||
closed_status_id { '15' }
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,11 @@
# frozen_string_literal: true

FactoryBot.define do
factory :terraform_state, class: 'Terraform::State' do
project { create(:project) }

trait :with_file do
file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate') }
end
end
end
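In specs, the new factory would typically be used along these lines (a sketch, assuming FactoryBot syntax methods are included):

```ruby
state = create(:terraform_state, :with_file)

state.project    # the project built by the factory
state.file.read  # contents of spec/fixtures/terraform/terraform.tfstate
```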
@ -0,0 +1,8 @@
|
|||
{
|
||||
"version": 4,
|
||||
"terraform_version": "0.12.21",
|
||||
"serial": 1,
|
||||
"lineage": "25e05991-243d-28d6-ebe3-ee0baae462cf",
|
||||
"outputs": {},
|
||||
"resources": []
|
||||
}
|
||||
|
|
@ -17,3 +17,20 @@ export const GlTooltip = {
|
|||
return h('div', this.$attrs, this.$slots.default);
|
||||
},
|
||||
};
|
||||
|
||||
export const GlPopoverDirective = {
|
||||
bind() {},
|
||||
};
|
||||
|
||||
export const GlPopover = {
|
||||
props: {
|
||||
cssClasses: {
|
||||
type: Array,
|
||||
required: false,
|
||||
default: () => [],
|
||||
},
|
||||
},
|
||||
render(h) {
|
||||
return h('div', this.$attrs, this.$slots.default);
|
||||
},
|
||||
};
|
||||
|
|
|
|||
|
|
@ -0,0 +1,54 @@
|
|||
import { shallowMount } from '@vue/test-utils';
|
||||
import { GlBarChart } from '@gitlab/ui/dist/charts';
|
||||
import Bar from '~/monitoring/components/charts/bar.vue';
|
||||
import { barMockData } from '../../mock_data';
|
||||
|
||||
jest.mock('~/lib/utils/icon_utils', () => ({
|
||||
getSvgIconPathContent: jest.fn().mockResolvedValue('mockSvgPathContent'),
|
||||
}));
|
||||
|
||||
describe('Bar component', () => {
|
||||
let barChart;
|
||||
let store;
|
||||
|
||||
beforeEach(() => {
|
||||
barChart = shallowMount(Bar, {
|
||||
propsData: {
|
||||
graphData: barMockData,
|
||||
},
|
||||
store,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
barChart.destroy();
|
||||
});
|
||||
|
||||
describe('wrapped components', () => {
|
||||
describe('GitLab UI bar chart', () => {
|
||||
let glbarChart;
|
||||
let chartData;
|
||||
|
||||
beforeEach(() => {
|
||||
glbarChart = barChart.find(GlBarChart);
|
||||
chartData = barChart.vm.chartData[barMockData.metrics[0].label];
|
||||
});
|
||||
|
||||
it('is a Vue instance', () => {
|
||||
expect(glbarChart.isVueInstance()).toBe(true);
|
||||
});
|
||||
|
||||
it('should display a label on the x axis', () => {
|
||||
expect(glbarChart.vm.xAxisTitle).toBe(barMockData.xLabel);
|
||||
});
|
||||
|
||||
it('should return chartData as array of arrays', () => {
|
||||
expect(chartData).toBeInstanceOf(Array);
|
||||
|
||||
chartData.forEach(item => {
|
||||
expect(item).toBeInstanceOf(Array);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@@ -703,3 +703,50 @@ export const stackedColumnMockedData = {
    },
  ],
};

export const barMockData = {
  title: 'SLA Trends - Primary Services',
  type: 'bar-chart',
  xLabel: 'service',
  y_label: 'percentile',
  metrics: [
    {
      id: 'sla_trends_primary_services',
      series_name: 'group 1',
      metric_id: 'undefined_sla_trends_primary_services',
      metricId: 'undefined_sla_trends_primary_services',
      query_range:
        'avg(avg_over_time(slo_observation_status{environment="gprd", stage=~"main|", type=~"api|web|git|registry|sidekiq|ci-runners"}[1d])) by (type)',
      unit: 'Percentile',
      label: 'SLA',
      prometheus_endpoint_path:
        '/gitlab-com/metrics-dogfooding/-/environments/266/prometheus/api/v1/query_range?query=clamp_min%28clamp_max%28avg%28avg_over_time%28slo_observation_status%7Benvironment%3D%22gprd%22%2C+stage%3D~%22main%7C%22%2C+type%3D~%22api%7Cweb%7Cgit%7Cregistry%7Csidekiq%7Cci-runners%22%7D%5B1d%5D%29%29+by+%28type%29%2C1%29%2C0%29',
      result: [
        {
          metric: { type: 'api' },
          values: [[1583995208, '0.9935198135198128']],
        },
        {
          metric: { type: 'git' },
          values: [[1583995208, '0.9975296513504401']],
        },
        {
          metric: { type: 'registry' },
          values: [[1583995208, '0.9994716394716395']],
        },
        {
          metric: { type: 'sidekiq' },
          values: [[1583995208, '0.9948251748251747']],
        },
        {
          metric: { type: 'web' },
          values: [[1583995208, '0.9535664335664336']],
        },
        {
          metric: { type: 'postgresql_database' },
          values: [[1583995208, '0.9335664335664336']],
        },
      ],
    },
  ],
};

@@ -25,6 +25,10 @@ describe('mapToDashboardViewModel', () => {
        panels: [
          {
            title: 'Title A',
            xLabel: '',
            xAxis: {
              name: '',
            },
            type: 'chart-type',
            y_label: 'Y Label A',
            metrics: [],

@@ -44,6 +48,10 @@ describe('mapToDashboardViewModel', () => {
          {
            title: 'Title A',
            type: 'chart-type',
            xLabel: '',
            xAxis: {
              name: '',
            },
            y_label: 'Y Label A',
            yAxis: {
              name: 'Y Label A',

@@ -114,6 +122,28 @@ describe('mapToDashboardViewModel', () => {

    const getMappedPanel = () => mapToDashboardViewModel(dashboard).panelGroups[0].panels[0];

    it('panel with x_label', () => {
      setupWithPanel({
        title: panelTitle,
        x_label: 'x label',
      });

      expect(getMappedPanel()).toEqual({
        title: panelTitle,
        xLabel: 'x label',
        xAxis: {
          name: 'x label',
        },
        y_label: '',
        yAxis: {
          name: '',
          format: SUPPORTED_FORMATS.number,
          precision: 2,
        },
        metrics: [],
      });
    });

    it('group y_axis defaults', () => {
      setupWithPanel({
        title: panelTitle,

@@ -121,7 +151,11 @@ describe('mapToDashboardViewModel', () => {

      expect(getMappedPanel()).toEqual({
        title: panelTitle,
        xLabel: '',
        y_label: '',
        xAxis: {
          name: '',
        },
        yAxis: {
          name: '',
          format: SUPPORTED_FORMATS.number,

@@ -6,6 +6,7 @@ import {
  graphDataPrometheusQuery,
  graphDataPrometheusQueryRange,
  anomalyMockGraphData,
  barMockData,
} from './mock_data';

jest.mock('~/lib/utils/url_utility');

@@ -210,4 +211,67 @@ describe('monitoring/utils', () => {
      expect(mergeUrlParams).toHaveBeenCalledWith({ duration_seconds: `${seconds}` }, fromUrl);
    });
  });

  describe('barChartsDataParser', () => {
    const singleMetricExpected = {
      SLA: [
        ['0.9935198135198128', 'api'],
        ['0.9975296513504401', 'git'],
        ['0.9994716394716395', 'registry'],
        ['0.9948251748251747', 'sidekiq'],
        ['0.9535664335664336', 'web'],
        ['0.9335664335664336', 'postgresql_database'],
      ],
    };

    const multipleMetricExpected = {
      ...singleMetricExpected,
      SLA_2: Object.values(singleMetricExpected)[0],
    };

    const barMockDataWithMultipleMetrics = {
      ...barMockData,
      metrics: [
        barMockData.metrics[0],
        {
          ...barMockData.metrics[0],
          label: 'SLA_2',
        },
      ],
    };

    [
      {
        input: { metrics: undefined },
        output: {},
        testCase: 'barChartsDataParser returns {} with undefined',
      },
      {
        input: { metrics: null },
        output: {},
        testCase: 'barChartsDataParser returns {} with null',
      },
      {
        input: { metrics: [] },
        output: {},
        testCase: 'barChartsDataParser returns {} with []',
      },
      {
        input: barMockData,
        output: singleMetricExpected,
        testCase: 'barChartsDataParser returns single series object with single metrics',
      },
      {
        input: barMockDataWithMultipleMetrics,
        output: multipleMetricExpected,
        testCase: 'barChartsDataParser returns multiple series object with multiple metrics',
      },
    ].forEach(({ input, output, testCase }) => {
      it(testCase, () => {
        expect(monitoringUtils.barChartsDataParser(input.metrics)).toEqual(
          expect.objectContaining(output),
        );
      });
    });
  });
});

@@ -196,7 +196,7 @@ describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode, :request_store
        subject.request_admin_mode!
        subject.enable_admin_mode!(password: user.password)

        expect(session).to include(expected_session_entry(be_within(1.second).of Time.now))
        expect(session).to include(expected_session_entry(be_within(1.second).of(Time.now)))
      end
    end

@@ -66,7 +66,7 @@ describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
  describe '#children' do
    subject { |example| path(example).children }

    it { is_expected.to all(be_an_instance_of described_class) }
    it { is_expected.to all(be_an_instance_of(described_class)) }
    it do
      is_expected.to contain_exactly entry('path/dir_1/file_1'),
                                     entry('path/dir_1/file_b'),

@@ -78,7 +78,7 @@ describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
    subject { |example| path(example).files }

    it { is_expected.to all(be_file) }
    it { is_expected.to all(be_an_instance_of described_class) }
    it { is_expected.to all(be_an_instance_of(described_class)) }
    it do
      is_expected.to contain_exactly entry('path/dir_1/file_1'),
                                     entry('path/dir_1/file_b')

@@ -90,7 +90,7 @@ describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
    subject { |example| path(example).directories }

    it { is_expected.to all(be_directory) }
    it { is_expected.to all(be_an_instance_of described_class) }
    it { is_expected.to all(be_an_instance_of(described_class)) }
    it { is_expected.to contain_exactly entry('path/dir_1/subdir/') }
  end

@@ -98,7 +98,7 @@ describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
    subject { |example| path(example).directories(parent: true) }

    it { is_expected.to all(be_directory) }
    it { is_expected.to all(be_an_instance_of described_class) }
    it { is_expected.to all(be_an_instance_of(described_class)) }
    it do
      is_expected.to contain_exactly entry('path/dir_1/subdir/'),
                                     entry('path/')

@@ -69,7 +69,7 @@ describe Gitlab::Email::Message::RepositoryPush do
  describe '#diffs' do
    subject { message.diffs }

    it { is_expected.to all(be_an_instance_of Gitlab::Diff::File) }
    it { is_expected.to all(be_an_instance_of(Gitlab::Diff::File)) }
  end

  describe '#diffs_count' do

@@ -74,7 +74,7 @@ describe Gitlab::Gfm::UploadsRewriter do
      end

      it 'throw an error' do
        expect { rewriter.rewrite(new_project) }.to raise_error(an_instance_of(StandardError).and having_attributes(message: "Invalid path"))
        expect { rewriter.rewrite(new_project) }.to raise_error(an_instance_of(StandardError).and(having_attributes(message: "Invalid path")))
      end
    end

@@ -252,6 +252,7 @@ services:
- service_hook
- jira_tracker_data
- issue_tracker_data
- open_project_tracker_data
hooks:
- project
- web_hook_logs

@@ -0,0 +1,35 @@
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::ImportFormatter do
  let(:formatter) { Gitlab::ImportFormatter.new }

  describe '#comment' do
    it 'creates the correct string' do
      expect(formatter.comment('Name', '2020-02-02', 'some text')).to eq(
        "\n\n*By Name on 2020-02-02*\n\nsome text"
      )
    end
  end

  describe '#author_line' do
    it 'returns the correct string with provided author name' do
      expect(formatter.author_line('Name')).to eq("*Created by: Name*\n\n")
    end

    it 'returns the correct string with Anonymous name if author not provided' do
      expect(formatter.author_line(nil)).to eq("*Created by: Anonymous*\n\n")
    end
  end

  describe '#assignee_line' do
    it 'returns the correct string with provided author name' do
      expect(formatter.assignee_line('Name')).to eq("*Assigned to: Name*\n\n")
    end

    it 'returns the correct string with Anonymous name if author not provided' do
      expect(formatter.assignee_line(nil)).to eq("*Assigned to: Anonymous*\n\n")
    end
  end
end

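A rough sketch of the behaviour these examples pin down; the method bodies below are inferred from the expected strings, not taken from the commit:

    # Inferred from the expectations above: fall back to "Anonymous"
    # when no author or assignee name is given.
    def author_line(author)
      "*Created by: #{author || 'Anonymous'}*\n\n"
    end

    def assignee_line(assignee)
      "*Assigned to: #{assignee || 'Anonymous'}*\n\n"
    end
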
@@ -0,0 +1,89 @@
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::JiraImport::IssueSerializer do
  describe '#execute' do
    let_it_be(:project) { create(:project) }

    let(:iid) { 5 }
    let(:key) { 'PROJECT-5' }
    let(:summary) { 'some title' }
    let(:description) { 'basic description' }
    let(:created_at) { '2020-01-01 20:00:00' }
    let(:updated_at) { '2020-01-10 20:00:00' }
    let(:assignee) { double(displayName: 'Solver') }
    let(:jira_status) { 'new' }
    let(:jira_issue) do
      double(
        id: '1234',
        key: key,
        summary: summary,
        description: description,
        created: created_at,
        updated: updated_at,
        assignee: assignee,
        reporter: double(displayName: 'Reporter'),
        status: double(statusCategory: { 'key' => jira_status })
      )
    end
    let(:params) { { iid: iid } }

    let(:expected_description) do
      <<~MD
        *Created by: Reporter*

        *Assigned to: Solver*

        basic description
      MD
    end

    subject { described_class.new(project, jira_issue, params).execute }

    context 'attributes setting' do
      it 'sets the basic attributes' do
        expect(subject).to eq(
          iid: iid,
          project_id: project.id,
          description: expected_description.strip,
          title: "[#{key}] #{summary}",
          state_id: 1,
          updated_at: updated_at,
          created_at: created_at,
          author_id: project.creator_id
        )
      end
    end

    context 'with done status' do
      let(:jira_status) { 'done' }

      it 'maps the status to closed' do
        expect(subject[:state_id]).to eq(2)
      end
    end

    context 'without the assignee' do
      let(:assignee) { nil }

      it 'does not include assignee in the description' do
        expected_description = <<~MD
          *Created by: Reporter*

          basic description
        MD

        expect(subject[:description]).to eq(expected_description.strip)
      end
    end

    context 'without the iid' do
      let(:params) { {} }

      it 'does not set the iid' do
        expect(subject[:iid]).to be_nil
      end
    end
  end
end

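The expected_description above matches what the ImportFormatter helpers tested earlier in this commit produce, which suggests the serializer composes them roughly as sketched below; this is a hedged illustration, not the serializer's actual source:

    # Hypothetical composition: author line, optional assignee line,
    # then the raw Jira description.
    body = formatter.author_line(jira_issue.reporter.displayName)
    body += formatter.assignee_line(jira_issue.assignee.displayName) if jira_issue.assignee
    body + jira_issue.description
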
@@ -230,7 +230,7 @@ describe Notify do
        is_expected.to have_referable_subject(issue, reply: true)
        is_expected.to have_body_text(status)
        is_expected.to have_body_text(current_user_sanitized)
        is_expected.to have_body_text(project_issue_path project, issue)
        is_expected.to have_body_text(project_issue_path(project, issue))
      end
    end
  end

@@ -3,7 +3,7 @@
require 'spec_helper'

describe JiraTrackerData do
  let(:service) { create(:jira_service, active: false, properties: {}) }
  let(:service) { create(:jira_service, active: false) }

  describe 'Associations' do
    it { is_expected.to belong_to(:service) }

@@ -0,0 +1,35 @@
# frozen_string_literal: true

require 'spec_helper'

describe OpenProjectService do
  describe 'Validations' do
    context 'when service is active' do
      before do
        subject.active = true
      end

      it { is_expected.to validate_presence_of(:url) }
      it { is_expected.to validate_presence_of(:token) }
      it { is_expected.to validate_presence_of(:project_identifier_code) }

      it_behaves_like 'issue tracker service URL attribute', :url
      it_behaves_like 'issue tracker service URL attribute', :api_url
    end

    context 'when service is inactive' do
      before do
        subject.active = false
      end

      it { is_expected.not_to validate_presence_of(:url) }
      it { is_expected.not_to validate_presence_of(:token) }
      it { is_expected.not_to validate_presence_of(:project_identifier_code) }
    end
  end

  describe 'Associations' do
    it { is_expected.to belong_to :project }
    it { is_expected.to have_one :service_hook }
  end
end

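One way the model could express the validations asserted here, shown purely as an illustration since the service class itself is not part of this excerpt:

    # Assumption: the fields are only required while the integration is active,
    # mirroring the active/inactive contexts in the spec above.
    with_options if: :active? do
      validates :url, presence: true
      validates :token, presence: true
      validates :project_identifier_code, presence: true
    end
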
@@ -0,0 +1,19 @@
# frozen_string_literal: true

require 'spec_helper'

describe OpenProjectTrackerData do
  describe 'Associations' do
    it { is_expected.to belong_to(:service) }
  end

  describe 'closed_status_id' do
    it 'returns the set value' do
      expect(build(:open_project_tracker_data).closed_status_id).to eq('15')
    end

    it 'returns the default value if not set' do
      expect(build(:open_project_tracker_data, closed_status_id: nil).closed_status_id).to eq('13')
    end
  end
end

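A hedged reading of the default behaviour checked above; the reader shown below is an assumption layered on the spec, and '15' is presumed to come from the factory rather than the model:

    # Assumed implementation: fall back to '13' when the column is nil.
    def closed_status_id
      super || '13'
    end
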
@@ -0,0 +1,52 @@
# frozen_string_literal: true

require 'spec_helper'

describe Terraform::State do
  subject { create(:terraform_state, :with_file) }

  it { is_expected.to belong_to(:project) }

  it { is_expected.to validate_presence_of(:project_id) }

  before do
    stub_terraform_state_object_storage(Terraform::StateUploader)
  end

  describe '#file_store' do
    context 'when no value is set' do
      it 'returns the default store of the uploader' do
        [ObjectStorage::Store::LOCAL, ObjectStorage::Store::REMOTE].each do |store|
          expect(Terraform::StateUploader).to receive(:default_store).and_return(store)
          expect(described_class.new.file_store).to eq(store)
        end
      end
    end

    context 'when a value is set' do
      it 'returns the value' do
        [ObjectStorage::Store::LOCAL, ObjectStorage::Store::REMOTE].each do |store|
          expect(build(:terraform_state, file_store: store).file_store).to eq(store)
        end
      end
    end
  end

  describe '#update_file_store' do
    context 'when file is stored in object storage' do
      it 'sets file_store to remote' do
        expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE)
      end
    end

    context 'when file is stored locally' do
      before do
        stub_terraform_state_object_storage(Terraform::StateUploader, enabled: false)
      end

      it 'sets file_store to local' do
        expect(subject.file_store).to eq(ObjectStorage::Store::LOCAL)
      end
    end
  end
end

@@ -910,13 +910,13 @@ describe API::MergeRequests do
        get api("/projects/#{project.id}/merge_requests/#{merge_request.reload.iid}", user)

        expect(json_response['merged_by']['id']).to eq(merge_request.metrics.merged_by_id)
        expect(Time.parse json_response['merged_at']).to be_like_time(merge_request.metrics.merged_at)
        expect(Time.parse(json_response['merged_at'])).to be_like_time(merge_request.metrics.merged_at)
        expect(json_response['closed_by']['id']).to eq(merge_request.metrics.latest_closed_by_id)
        expect(Time.parse json_response['closed_at']).to be_like_time(merge_request.metrics.latest_closed_at)
        expect(Time.parse(json_response['closed_at'])).to be_like_time(merge_request.metrics.latest_closed_at)
        expect(json_response['pipeline']['id']).to eq(merge_request.metrics.pipeline_id)
        expect(Time.parse json_response['latest_build_started_at']).to be_like_time(merge_request.metrics.latest_build_started_at)
        expect(Time.parse json_response['latest_build_finished_at']).to be_like_time(merge_request.metrics.latest_build_finished_at)
        expect(Time.parse json_response['first_deployed_to_production_at']).to be_like_time(merge_request.metrics.first_deployed_to_production_at)
        expect(Time.parse(json_response['latest_build_started_at'])).to be_like_time(merge_request.metrics.latest_build_started_at)
        expect(Time.parse(json_response['latest_build_finished_at'])).to be_like_time(merge_request.metrics.latest_build_finished_at)
        expect(Time.parse(json_response['first_deployed_to_production_at'])).to be_like_time(merge_request.metrics.first_deployed_to_production_at)
      end
    end

@@ -53,19 +53,5 @@ describe MergeRequests::AfterCreateService do

      after_create_service.execute(merge_request)
    end

    # https://gitlab.com/gitlab-org/gitlab/issues/208813
    context 'when the create_merge_request_pipelines_in_sidekiq flag is disabled' do
      before do
        stub_feature_flags(create_merge_request_pipelines_in_sidekiq: false)
      end

      it 'does not create a pipeline or update the HEAD pipeline' do
        expect(after_create_service).not_to receive(:create_pipeline_for)
        expect(merge_request).not_to receive(:update_head_pipeline)

        after_create_service.execute(merge_request)
      end
    end
  end
end

@@ -129,22 +129,6 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
        end
      end

      # https://gitlab.com/gitlab-org/gitlab/issues/208813
      context 'when the create_merge_request_pipelines_in_sidekiq flag is disabled' do
        before do
          stub_feature_flags(create_merge_request_pipelines_in_sidekiq: false)
        end

        it 'creates a pipeline and updates the HEAD pipeline' do
          expect(service).to receive(:create_pipeline_for)
          expect_next_instance_of(MergeRequest) do |merge_request|
            expect(merge_request).to receive(:update_head_pipeline)
          end

          service.execute
        end
      end

      context 'when head pipelines already exist for merge request source branch', :sidekiq_inline do
        let(:shas) { project.repository.commits(opts[:source_branch], limit: 2).map(&:id) }
        let!(:pipeline_1) { create(:ci_pipeline, project: project, ref: opts[:source_branch], project_id: project.id, sha: shas[1]) }

@@ -27,6 +27,6 @@ describe Projects::GroupLinks::CreateService, '#execute' do
  end

  it 'returns error if user is not allowed to share with a group' do
    expect { subject.execute(create :group) }.not_to change { project.project_group_links.count }
    expect { subject.execute(create(:group)) }.not_to change { project.project_group_links.count }
  end
end

@@ -160,7 +160,7 @@ describe Releases::CreateService do

    context 'when no milestone is passed in' do
      it 'creates a release without a milestone tied to it' do
        expect(params.key? :milestones).to be_falsey
        expect(params.key?(:milestones)).to be_falsey

        service.execute
        release = project.releases.last

@@ -103,7 +103,7 @@ describe Spam::SpamCheckService do
        issue.description = 'SPAM!'
      end

      context 'when indicated as spam by akismet' do
      context 'when indicated as spam by Akismet' do
        before do
          allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: true))
        end

@@ -115,7 +115,7 @@ describe Spam::SpamCheckService do

        it_behaves_like 'akismet spam'

        it 'checks as spam' do
        it 'marks as spam' do
          subject

          expect(issue.reload.spam).to be_truthy

@@ -125,7 +125,7 @@ describe Spam::SpamCheckService do
        context 'when allow_possible_spam feature flag is true' do
          it_behaves_like 'akismet spam'

          it 'does not check as spam' do
          it 'does not mark as spam' do
            subject

            expect(issue.spam).to be_falsey

@@ -133,7 +133,7 @@ describe Spam::SpamCheckService do
        end
      end

      context 'when not indicated as spam by akismet' do
      context 'when not indicated as spam by Akismet' do
        before do
          allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: false))
        end

@@ -94,7 +94,7 @@ describe WikiPages::UpdateService do
      end

      it 'reports the error' do
        expect(service.execute page).to be_invalid
        expect(service.execute(page)).to be_invalid
          .and have_attributes(errors: be_present)
      end
    end

@@ -70,6 +70,13 @@ module StubObjectStorage
                                 **params)
  end

  def stub_terraform_state_object_storage(uploader = described_class, **params)
    stub_object_storage_uploader(config: Gitlab.config.terraform_state.object_store,
                                 uploader: uploader,
                                 remote_directory: 'terraform_state',
                                 **params)
  end

  def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id")
    stub_request(:post, %r{\A#{endpoint}tmp/uploads/[a-z0-9-]*\?uploads\z})
      .to_return status: 200, body: <<-EOS.strip_heredoc

@@ -0,0 +1,82 @@
# frozen_string_literal: true

require 'spec_helper'

describe Terraform::StateUploader do
  subject { terraform_state.file }

  let(:terraform_state) { create(:terraform_state, file: fixture_file_upload('spec/fixtures/terraform/terraform.tfstate')) }

  before do
    stub_terraform_state_object_storage
  end

  describe '#filename' do
    it 'contains the ID of the terraform state record' do
      expect(subject.filename).to include(terraform_state.id.to_s)
    end
  end

  describe '#store_dir' do
    it 'contains the ID of the project' do
      expect(subject.store_dir).to include(terraform_state.project_id.to_s)
    end
  end

  describe '#key' do
    it 'creates a digest with a secret key and the project id' do
      expect(OpenSSL::HMAC)
        .to receive(:digest)
        .with('SHA256', Gitlab::Application.secrets.db_key_base, terraform_state.project_id.to_s)
        .and_return('digest')

      expect(subject.key).to eq('digest')
    end
  end

  describe 'encryption' do
    it 'encrypts the stored file' do
      expect(subject.file.read).not_to eq(fixture_file('terraform/terraform.tfstate'))
    end

    it 'decrypts the file when reading' do
      expect(subject.read).to eq(fixture_file('terraform/terraform.tfstate'))
    end
  end

  describe '.direct_upload_enabled?' do
    it 'returns false' do
      expect(described_class.direct_upload_enabled?).to eq(false)
    end
  end

  describe '.background_upload_enabled?' do
    it 'returns false' do
      expect(described_class.background_upload_enabled?).to eq(false)
    end
  end

  describe '.proxy_download_enabled?' do
    it 'returns true' do
      expect(described_class.proxy_download_enabled?).to eq(true)
    end
  end

  describe '.default_store' do
    context 'when object storage is enabled' do
      it 'returns REMOTE' do
        expect(described_class.default_store).to eq(ObjectStorage::Store::REMOTE)
      end
    end

    context 'when object storage is disabled' do
      before do
        stub_terraform_state_object_storage(enabled: false)
      end

      it 'returns LOCAL' do
        expect(described_class.default_store).to eq(ObjectStorage::Store::LOCAL)
      end
    end
  end
end
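For orientation, the '#key' example pins the encryption key derivation to a single HMAC call, so the uploader's key method presumably reduces to the expression below; this is a hedged reading of the spec, and `model` is assumed to be the mounted Terraform::State record:

    # Per-project key derived from the instance secret, as asserted above.
    OpenSSL::HMAC.digest('SHA256', Gitlab::Application.secrets.db_key_base, model.project_id.to_s)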