Add latest changes from gitlab-org/gitlab@master
@@ -1518,7 +1518,7 @@ ee/lib/ee/api/entities/project.rb
/gems/gitlab-housekeeper/
/keeps/

^[Storage Statistics] @gitlab-org/fulfillment/utilization/be
^[Storage Statistics] @jagood @suraj_tripathy @vij
/app/models/namespace/root_storage_statistics.rb
/ee/app/models/ee/namespace/root_storage_statistics.rb
/app/models/project_statistics.rb
@@ -2666,7 +2666,6 @@ Layout/LineLength:
|
|||
- 'lib/gitlab/tracking/event_definition.rb'
|
||||
- 'lib/gitlab/usage/metric_definition.rb'
|
||||
- 'lib/gitlab/usage/metrics/aggregates/aggregate.rb'
|
||||
- 'lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection.rb'
|
||||
- 'lib/gitlab/usage/metrics/aggregates/sources/postgres_hll.rb'
|
||||
- 'lib/gitlab/usage/service_ping_report.rb'
|
||||
- 'lib/gitlab/usage_data.rb'
@@ -3794,7 +3793,6 @@ Layout/LineLength:
|
|||
- 'spec/lib/gitlab/usage/metric_definition_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metric_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb'
@@ -3972,7 +3972,6 @@ RSpec/FeatureCategory:
|
|||
- 'spec/lib/gitlab/usage/metric_definition_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metric_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/instrumentations/active_user_count_metric_spec.rb'
@@ -67,6 +67,7 @@ export default {
|
|||
:text="currentSortText"
|
||||
:sort-options="$options.sortOptions"
|
||||
:sort-by="currentSortOption"
|
||||
data-testid="catalog-sorting-option-button"
|
||||
@sortByChange="setSelectedSortOption"
|
||||
@sortDirectionChange="onSortDirectionChange"
|
||||
/>
@@ -29,6 +29,7 @@ import {
|
|||
import { issuableAttributesQueries } from 'ee_else_ce/sidebar/queries/constants';
|
||||
import { createAlert } from '~/alert';
|
||||
import { PathIdSeparator } from '~/related_issues/constants';
|
||||
import { WORK_ITEM_TYPE_ENUM_EPIC } from '~/work_items/constants';
|
||||
|
||||
export default {
|
||||
noAttributeId,
|
||||
|
|
@ -123,6 +124,10 @@ export default {
|
|||
includeWorkItems: this.showWorkItemEpics,
|
||||
};
|
||||
|
||||
if (this.showWorkItemEpics) {
|
||||
variables.types = [WORK_ITEM_TYPE_ENUM_EPIC];
|
||||
}
|
||||
|
||||
if (epicIidPattern.test(this.searchTerm)) {
|
||||
const matches = this.searchTerm.match(epicIidPattern);
|
||||
variables.iidStartsWith = matches.groups.iid;
|
||||
|
|
@ -222,12 +227,7 @@ export default {
|
|||
@show="handleShow"
|
||||
@shown="setFocus"
|
||||
>
|
||||
<gl-search-box-by-type
|
||||
v-if="!showWorkItemEpics"
|
||||
ref="search"
|
||||
v-model="searchTerm"
|
||||
:placeholder="__('Search')"
|
||||
/>
|
||||
<gl-search-box-by-type ref="search" v-model="searchTerm" :placeholder="__('Search')" />
|
||||
<gl-dropdown-item
|
||||
:data-testid="`no-${formatIssuableAttribute.kebab}-item`"
|
||||
is-check-item
|
||||
|
|
|
|||
|
|
@ -185,7 +185,7 @@ export default {
|
|||
};
|
||||
},
|
||||
canLockWorkItem() {
|
||||
return this.canUpdate && this.glFeatures.workItemsMvc;
|
||||
return this.canUpdate && this.glFeatures.workItemsBeta;
|
||||
},
|
||||
canPromoteToObjective() {
|
||||
return this.canUpdate && this.workItemType === WORK_ITEM_TYPE_VALUE_KEY_RESULT;
|
||||
|
|
|
|||
|
|
@ -14,9 +14,6 @@ import {
|
|||
WIDGET_TYPE_TIME_TRACKING,
|
||||
WIDGET_TYPE_WEIGHT,
|
||||
WIDGET_TYPE_COLOR,
|
||||
WORK_ITEM_TYPE_VALUE_KEY_RESULT,
|
||||
WORK_ITEM_TYPE_VALUE_OBJECTIVE,
|
||||
WORK_ITEM_TYPE_VALUE_TASK,
|
||||
} from '../constants';
|
||||
import WorkItemAssigneesInline from './work_item_assignees_inline.vue';
|
||||
import WorkItemAssigneesWithEdit from './work_item_assignees_with_edit.vue';
|
||||
|
|
@ -111,13 +108,6 @@ export default {
|
|||
workItemMilestone() {
|
||||
return this.isWidgetPresent(WIDGET_TYPE_MILESTONE);
|
||||
},
|
||||
showWorkItemParent() {
|
||||
return (
|
||||
this.workItemType === WORK_ITEM_TYPE_VALUE_OBJECTIVE ||
|
||||
this.workItemType === WORK_ITEM_TYPE_VALUE_KEY_RESULT ||
|
||||
this.workItemType === WORK_ITEM_TYPE_VALUE_TASK
|
||||
);
|
||||
},
|
||||
workItemParent() {
|
||||
return this.isWidgetPresent(WIDGET_TYPE_HIERARCHY)?.parent;
|
||||
},
|
||||
|
|
@ -302,7 +292,7 @@ export default {
|
|||
@error="$emit('error', $event)"
|
||||
/>
|
||||
</template>
|
||||
<template v-if="showWorkItemParent">
|
||||
<template v-if="workItemHierarchy">
|
||||
<work-item-parent
|
||||
v-if="glFeatures.workItemsMvc2"
|
||||
class="gl-mb-5 gl-pt-5 gl-border-t gl-border-gray-50"
|
||||
|
|
@ -336,7 +326,7 @@ export default {
|
|||
:total-time-spent="workItemTimeTracking.totalTimeSpent"
|
||||
/>
|
||||
<participants
|
||||
v-if="workItemParticipants && glFeatures.workItemsMvc"
|
||||
v-if="workItemParticipants && glFeatures.workItemsBeta"
|
||||
class="gl-mb-5 gl-pt-5 gl-border-t gl-border-gray-50"
|
||||
:number-of-less-participants="10"
|
||||
:participants="workItemParticipants.participants.nodes"
|
||||
|
|
|
|||
|
|
@ -321,6 +321,7 @@ export const SUPPORTED_PARENT_TYPE_MAP = {
|
|||
[WORK_ITEM_TYPE_VALUE_OBJECTIVE]: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
|
||||
[WORK_ITEM_TYPE_VALUE_KEY_RESULT]: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
|
||||
[WORK_ITEM_TYPE_VALUE_TASK]: [WORK_ITEM_TYPE_ENUM_ISSUE],
|
||||
[WORK_ITEM_TYPE_VALUE_EPIC]: [WORK_ITEM_TYPE_ENUM_EPIC],
|
||||
};
|
||||
|
||||
export const LINKED_ITEMS_ANCHOR = 'linkeditems';
|
||||
|
|
|
|||
|
|
@@ -6,7 +6,7 @@ module Groups
|
|||
|
||||
before_action do
|
||||
push_force_frontend_feature_flag(:work_items, group&.work_items_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc, group&.work_items_mvc_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_beta, group&.work_items_beta_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc_2, group&.work_items_mvc_2_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:linked_work_items, group&.linked_work_items_feature_flag_enabled?)
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -36,7 +36,7 @@ class GroupsController < Groups::ApplicationController
|
|||
push_frontend_feature_flag(:or_issuable_queries, group)
|
||||
push_frontend_feature_flag(:frontend_caching, group)
|
||||
push_force_frontend_feature_flag(:work_items, group.work_items_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc, group.work_items_mvc_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_beta, group.work_items_beta_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc_2, group.work_items_mvc_2_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:linked_work_items, group.linked_work_items_feature_flag_enabled?)
|
||||
push_frontend_feature_flag(:issues_grid_view)
|
||||
|
|
|
|||
|
|
@@ -8,7 +8,7 @@ class Projects::IncidentsController < Projects::ApplicationController
|
|||
before_action :load_incident, only: [:show]
|
||||
before_action do
|
||||
push_force_frontend_feature_flag(:work_items, @project&.work_items_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc, @project&.work_items_mvc_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_beta, @project&.work_items_beta_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc_2, @project&.work_items_mvc_2_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:linked_work_items, @project&.linked_work_items_feature_flag_enabled?)
|
||||
push_frontend_feature_flag(:notifications_todos_buttons, current_user)
|
||||
|
|
|
|||
|
|
@@ -63,8 +63,8 @@ class Projects::IssuesController < Projects::ApplicationController
|
|||
end
|
||||
|
||||
before_action only: :show do
|
||||
push_frontend_feature_flag(:work_items_mvc, project&.group)
|
||||
push_force_frontend_feature_flag(:work_items_mvc, project&.work_items_mvc_feature_flag_enabled?)
|
||||
push_frontend_feature_flag(:work_items_beta, project&.group)
|
||||
push_force_frontend_feature_flag(:work_items_beta, project&.work_items_beta_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc_2, project&.work_items_mvc_2_feature_flag_enabled?)
|
||||
push_frontend_feature_flag(:epic_widget_edit_confirmation, project)
|
||||
push_frontend_feature_flag(:display_work_item_epic_issue_sidebar, project)
|
||||
|
|
|
|||
|
|
@@ -9,7 +9,7 @@ class Projects::WorkItemsController < Projects::ApplicationController
|
|||
before_action :authorize_import_access!, only: [:import_csv, :authorize] # rubocop:disable Rails/LexicallyScopedActionFilter
|
||||
before_action do
|
||||
push_force_frontend_feature_flag(:work_items, project&.work_items_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc, project&.work_items_mvc_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_beta, project&.work_items_beta_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc_2, project&.work_items_mvc_2_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:linked_work_items, project&.linked_work_items_feature_flag_enabled?)
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -48,7 +48,7 @@ class ProjectsController < Projects::ApplicationController
|
|||
push_licensed_feature(:file_locks) if @project.present? && @project.licensed_feature_available?(:file_locks)
|
||||
push_licensed_feature(:security_orchestration_policies) if @project.present? && @project.licensed_feature_available?(:security_orchestration_policies)
|
||||
push_force_frontend_feature_flag(:work_items, @project&.work_items_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc, @project&.work_items_mvc_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_beta, @project&.work_items_beta_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:work_items_mvc_2, @project&.work_items_mvc_2_feature_flag_enabled?)
|
||||
push_force_frontend_feature_flag(:linked_work_items, @project&.linked_work_items_feature_flag_enabled?)
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -25,11 +25,17 @@ class ActiveSession
SESSION_BATCH_SIZE = 200
ALLOWED_NUMBER_OF_ACTIVE_SESSIONS = 100

attr_accessor :ip_address, :browser, :os,
ATTR_ACCESSOR_LIST = [
:ip_address, :browser, :os,
:device_name, :device_type,
:is_impersonated, :session_id, :session_private_id
].freeze
ATTR_READER_LIST = [
:created_at, :updated_at
].freeze

attr_reader :created_at, :updated_at
attr_accessor(*ATTR_ACCESSOR_LIST)
attr_reader(*ATTR_READER_LIST)

def created_at=(time)
@created_at = time.is_a?(String) ? Time.zone.parse(time) : time

@@ -240,6 +246,8 @@ class ActiveSession

if raw_session.start_with?('v2:')
session_data = Gitlab::Json.parse(raw_session[3..]).symbolize_keys
# load only known attributes
session_data.slice!(*ATTR_ACCESSOR_LIST.union(ATTR_READER_LIST))
new(**session_data)
else
# Deprecated legacy format. To be removed in 15.0
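The change above extracts the permitted session attributes into frozen constants, then filters deserialized payloads down to those keys before splatting them into keyword arguments. A minimal standalone sketch of that pattern (the class and attribute names here are illustrative, not GitLab's, and plain `Hash#slice` stands in for ActiveSupport's `slice!`):

```ruby
require 'json'

class SessionRecord
  ATTR_ACCESSOR_LIST = [:ip_address, :browser, :os].freeze
  ATTR_READER_LIST = [:created_at, :updated_at].freeze

  attr_accessor(*ATTR_ACCESSOR_LIST)
  attr_reader(*ATTR_READER_LIST)

  def initialize(**attrs)
    attrs.each { |key, value| instance_variable_set("@#{key}", value) }
  end

  def self.load(raw_json)
    data = JSON.parse(raw_json, symbolize_names: true)
    # Keep only declared attributes so unexpected keys in stored payloads
    # are dropped instead of leaking into the object.
    data = data.slice(*ATTR_ACCESSOR_LIST.union(ATTR_READER_LIST))
    new(**data)
  end
end

record = SessionRecord.load('{"ip_address":"127.0.0.1","browser":"Firefox","unknown":"x"}')
puts record.ip_address # => 127.0.0.1
```

Filtering before `new(**data)` mirrors the `slice!` call in the diff and presumably exists so that stored payloads containing extra or since-removed keys cannot raise an unknown-keyword `ArgumentError` when splatted.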
@@ -877,8 +877,8 @@ class Group < Namespace
|
|||
feature_flag_enabled_for_self_or_ancestor?(:work_items)
|
||||
end
|
||||
|
||||
def work_items_mvc_feature_flag_enabled?
|
||||
feature_flag_enabled_for_self_or_ancestor?(:work_items_mvc)
|
||||
def work_items_beta_feature_flag_enabled?
|
||||
feature_flag_enabled_for_self_or_ancestor?(:work_items_beta, type: :beta)
|
||||
end
|
||||
|
||||
def work_items_mvc_2_feature_flag_enabled?
|
||||
|
|
|
|||
|
|
@@ -3149,8 +3149,8 @@ class Project < ApplicationRecord
|
|||
group&.work_items_feature_flag_enabled? || Feature.enabled?(:work_items, self)
|
||||
end
|
||||
|
||||
def work_items_mvc_feature_flag_enabled?
|
||||
group&.work_items_mvc_feature_flag_enabled? || Feature.enabled?(:work_items_mvc)
|
||||
def work_items_beta_feature_flag_enabled?
|
||||
group&.work_items_beta_feature_flag_enabled? || Feature.enabled?(:work_items_beta, type: :beta)
|
||||
end
|
||||
|
||||
def work_items_mvc_2_feature_flag_enabled?
|
||||
|
|
|
|||
|
|
@@ -10,29 +10,34 @@ module ClickHouse
|
|||
table_schema = {database_name:String}
|
||||
SQL
|
||||
|
||||
def initialize(connection:, state: {})
|
||||
def initialize(connection:, runtime_limiter: Gitlab::Metrics::RuntimeLimiter.new, state: {})
|
||||
@connection = connection
|
||||
|
||||
@runtime_limiter = runtime_limiter
|
||||
@view_name = state.fetch(:view_name)
|
||||
@tmp_view_name = state.fetch(:tmp_view_name)
|
||||
@view_table_name = state.fetch(:view_table_name)
|
||||
@tmp_view_table_name = state.fetch(:tmp_view_table_name)
|
||||
@source_table_name = state.fetch(:source_table_name)
|
||||
@next_value = state[:next_value]
|
||||
end
|
||||
|
||||
def execute
|
||||
create_tmp_materialized_view_table
|
||||
create_tmp_materialized_view
|
||||
|
||||
backfill_data
|
||||
|
||||
rename_table
|
||||
drop_tmp_tables
|
||||
backfill_data.tap do |service_response|
|
||||
if service_response.payload[:status] == :finished
|
||||
rename_table
|
||||
drop_tmp_tables if Feature.enabled?(:rebuild_mv_drop_old_tables, type: :gitlab_com_derisk)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :connection, :view_name, :tmp_view_name, :view_table_name, :tmp_view_table_name, :source_table_name
|
||||
attr_reader :connection, :view_name, :tmp_view_name, :view_table_name, :tmp_view_table_name, :source_table_name,
|
||||
:next_value, :runtime_limiter
|
||||
|
||||
def create_tmp_materialized_view_table
|
||||
# Create a tmp table from the existing table, use IF NOT EXISTS to avoid failure when the table exists.
|
||||
|
|
@@ -64,7 +69,8 @@ module ClickHouse
|
|||
})
|
||||
view_query = connection.select(query).first['view_definition']
|
||||
|
||||
iterator.each_batch(column: :id, of: INSERT_BATCH_SIZE) do |scope|
|
||||
payload = { status: :finished }
|
||||
iterator.each_batch(column: :id, of: INSERT_BATCH_SIZE) do |scope, _min, max|
|
||||
# Use the materialized view query to backfill the new temporary table.
|
||||
# The materialized view query selects from the source table, example: FROM events.
|
||||
# Replace the FROM part and select data from a batched subquery.
|
||||
|
|
@@ -76,7 +82,14 @@ module ClickHouse
|
|||
|
||||
# Insert the batch
|
||||
connection.execute("INSERT INTO #{quote(tmp_view_table_name)} #{query}")
|
||||
|
||||
if runtime_limiter.over_time?
|
||||
payload.merge!(status: :over_time, next_value: max + 1)
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
ServiceResponse.success(payload: payload)
|
||||
end
|
||||
|
||||
def rename_table
|
||||
|
|
@@ -103,7 +116,8 @@ module ClickHouse
|
|||
|
||||
def iterator
|
||||
builder = ClickHouse::QueryBuilder.new(source_table_name)
|
||||
ClickHouse::Iterator.new(query_builder: builder, connection: connection)
|
||||
ClickHouse::Iterator.new(query_builder: builder, connection: connection, min_value: next_value,
|
||||
min_max_strategy: :order_limit)
|
||||
end
|
||||
end
|
||||
end
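With this change the service reports back through a payload instead of always finishing in one pass: the batch loop consults a runtime limiter, and only a `:finished` status lets the caller rename the tables and drop the temporary ones. A rough standalone sketch of that control flow (the limiter, the batches, and the payload keys are stand-ins for `Gitlab::Metrics::RuntimeLimiter` and `ServiceResponse`, not the real classes):

```ruby
class SimpleRuntimeLimiter
  def initialize(max_seconds)
    @deadline = Process.clock_gettime(Process::CLOCK_MONOTONIC) + max_seconds
  end

  def over_time?
    Process.clock_gettime(Process::CLOCK_MONOTONIC) >= @deadline
  end
end

# Processes id batches until done or out of time; returns a payload the
# caller can inspect before doing irreversible steps (rename/drop tables).
def backfill_data(batches, limiter)
  payload = { status: :finished }

  batches.each do |batch|
    insert_batch(batch) # stands in for the INSERT INTO tmp table step

    if limiter.over_time?
      payload = { status: :over_time, next_value: batch.last + 1 }
      break
    end
  end

  payload
end

def insert_batch(batch); end # no-op placeholder

result = backfill_data([1..100, 101..200], SimpleRuntimeLimiter.new(240))
puts result[:status] # => finished (or over_time when the budget is exhausted)
```

Resuming from `next_value` on a later run is what makes the rebuild safe to split across several short worker executions instead of one long-running job.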
@@ -12,21 +12,75 @@ module ClickHouse
|
|||
worker_has_external_dependencies! # the worker interacts with a ClickHouse database
|
||||
feature_category :value_stream_management
|
||||
|
||||
MATERIALIZED_VIEWS = [
|
||||
{
|
||||
view_name: 'contributions_mv',
|
||||
view_table_name: 'contributions',
|
||||
tmp_view_name: 'tmp_contributions_mv',
|
||||
tmp_view_table_name: 'tmp_contributions',
|
||||
source_table_name: 'events'
|
||||
}.freeze
|
||||
].freeze
|
||||
MAX_TTL = 5.minutes
|
||||
MAX_RUNTIME = 4.minutes
|
||||
REBUILDING_SCHEDULE = 1.week
|
||||
MATERIALIZED_VIEW = {
|
||||
view_name: 'contributions_mv',
|
||||
view_table_name: 'contributions',
|
||||
tmp_view_name: 'tmp_contributions_mv',
|
||||
tmp_view_table_name: 'tmp_contributions',
|
||||
source_table_name: 'events'
|
||||
}.freeze
|
||||
|
||||
def self.redis_key
|
||||
"rebuild_click_house_materialized_view:#{MATERIALIZED_VIEW[:view_name]}"
|
||||
end
|
||||
|
||||
def perform
|
||||
connection = ClickHouse::Connection.new(:main)
|
||||
ClickHouse::RebuildMaterializedViewService
|
||||
.new(connection: connection, state: MATERIALIZED_VIEWS.first)
|
||||
.execute
|
||||
return if Feature.disabled?(:rebuild_contributions_mv, type: :gitlab_com_derisk)
|
||||
|
||||
in_lock("#{self.class}:#{MATERIALIZED_VIEW[:view_name]}", ttl: MAX_TTL, retries: 0) do
|
||||
state = build_state
|
||||
|
||||
if state[:finished_at] && DateTime.parse(Gitlab::Json.parse(state[:finished_at])) > REBUILDING_SCHEDULE.ago
|
||||
break
|
||||
end
|
||||
|
||||
service_response = ClickHouse::RebuildMaterializedViewService
|
||||
.new(
|
||||
connection: ClickHouse::Connection.new(:main),
|
||||
runtime_limiter: Gitlab::Metrics::RuntimeLimiter.new(MAX_RUNTIME),
|
||||
state: state)
|
||||
.execute
|
||||
|
||||
payload = service_response.payload
|
||||
current_time = Time.current.to_json
|
||||
if payload[:status] == :over_time
|
||||
state.merge!(
|
||||
next_value: payload[:next_value],
|
||||
last_update_at: current_time,
|
||||
finished_at: nil
|
||||
)
|
||||
else
|
||||
state.merge!(
|
||||
next_value: nil,
|
||||
last_update_at: current_time,
|
||||
finished_at: current_time,
|
||||
started_at: nil
|
||||
)
|
||||
end
|
||||
|
||||
Gitlab::Redis::SharedState.with do |redis|
|
||||
redis.set(self.class.redis_key, Gitlab::Json.dump(state))
|
||||
end
|
||||
|
||||
log_extra_metadata_on_done(:state, state)
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def build_state
|
||||
Gitlab::Redis::SharedState.with do |redis|
|
||||
raw = redis.get(self.class.redis_key)
|
||||
state = raw.present? ? Gitlab::Json.parse(raw) : {}
|
||||
state.merge(initial_state).symbolize_keys
|
||||
end
|
||||
end
|
||||
|
||||
def initial_state
|
||||
MATERIALIZED_VIEW.merge(started_at: Time.current)
|
||||
end
|
||||
end
|
||||
end
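The cron worker persists its progress between runs as JSON under a per-view Redis key. An illustrative lifecycle of that state, with made-up ids and timestamps (the keys mirror the ones read and written above):

```ruby
# After a run that hit the 4-minute budget: remember where to resume.
state_after_partial_run = {
  view_name: 'contributions_mv',
  view_table_name: 'contributions',
  tmp_view_name: 'tmp_contributions_mv',
  tmp_view_table_name: 'tmp_contributions',
  source_table_name: 'events',
  started_at: '2024-02-20T10:00:00Z',
  next_value: 250_001,                    # resume the backfill from this id
  last_update_at: '2024-02-20T10:04:00Z',
  finished_at: nil
}

# After the run that completes the rebuild: clear the cursor and stamp
# finished_at, which short-circuits further runs until REBUILDING_SCHEDULE
# (one week) has elapsed.
state_after_finished_run = state_after_partial_run.merge(
  next_value: nil,
  last_update_at: '2024-02-20T10:14:00Z',
  finished_at: '2024-02-20T10:14:00Z',
  started_at: nil
)
```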
@@ -0,0 +1,9 @@
|
|||
---
|
||||
name: work_items_beta
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/377912
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144141
|
||||
rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/production/-/issues/17549
|
||||
milestone: '16.10'
|
||||
group: group::project management
|
||||
type: beta
|
||||
default_enabled: false
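A short note on consuming the new flag: the `type:` passed to `Feature.enabled?` should match the `type: beta` declared in this definition file (the feature flag tooling checks that the declared type and the argument agree in development and test). The calls below mirror the ones appearing elsewhere in this diff; `some_group` is a placeholder actor, not something from the change:

```ruby
# Instance-wide check, as in Project#work_items_beta_feature_flag_enabled?
Feature.enabled?(:work_items_beta, type: :beta)

# Scoped to an actor such as a group or project (assumed usage)
Feature.enabled?(:work_items_beta, some_group, type: :beta)
```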
@@ -1,8 +0,0 @@
|
|||
---
|
||||
name: work_items_mvc
|
||||
introduced_by_url: "https://gitlab.com/gitlab-org/gitlab/-/merge_requests/101062"
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/377912
|
||||
milestone: '15.5'
|
||||
type: development
|
||||
group: group::project management
|
||||
default_enabled: false
|
||||
|
|
@@ -0,0 +1,9 @@
|
|||
---
|
||||
name: rebuild_contributions_mv
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/431453
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144478
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/441620
|
||||
milestone: '16.10'
|
||||
group: group::optimize
|
||||
type: gitlab_com_derisk
|
||||
default_enabled: false
|
||||
|
|
@@ -0,0 +1,9 @@
|
|||
---
|
||||
name: rebuild_mv_drop_old_tables
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/431453
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144478
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/441584
|
||||
milestone: '16.10'
|
||||
group: group::optimize
|
||||
type: gitlab_com_derisk
|
||||
default_enabled: false
|
||||
|
|
@@ -918,6 +918,9 @@ Gitlab.ee do
|
|||
Settings.cron_jobs['click_house_audit_events_sync_worker'] ||= {}
|
||||
Settings.cron_jobs['click_house_audit_events_sync_worker']['cron'] ||= "*/3 * * * *"
|
||||
Settings.cron_jobs['click_house_audit_events_sync_worker']['job_class'] = 'ClickHouse::AuditEventsSyncWorker'
|
||||
Settings.cron_jobs['click_house_rebuild_materialized_view_cron_worker'] ||= {}
|
||||
Settings.cron_jobs['click_house_rebuild_materialized_view_cron_worker']['cron'] ||= "*/10 * * * *"
|
||||
Settings.cron_jobs['click_house_rebuild_materialized_view_cron_worker']['job_class'] = 'ClickHouse::RebuildMaterializedViewCronWorker'
|
||||
Settings.cron_jobs['vertex_ai_refresh_access_token_worker'] ||= {}
|
||||
Settings.cron_jobs['vertex_ai_refresh_access_token_worker']['cron'] ||= '*/50 * * * *'
|
||||
Settings.cron_jobs['vertex_ai_refresh_access_token_worker']['job_class'] = 'Llm::VertexAiAccessTokenRefreshWorker'
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ name: quickactions_checkin_reminder_weekly
|
|||
description: Count of WAU using the `/checkin_reminder` quick action
|
||||
product_section: dev
|
||||
product_stage: plan
|
||||
product_group: team_planning
|
||||
product_group: project_management
|
||||
value_type: number
|
||||
status: active
|
||||
milestone: "16.4"
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ description: Cumulative count of packets processed by ModSecurity since Usage Pi
|
|||
was last reported
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: number
|
||||
status: removed
|
||||
milestone_removed: "14.0"
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ description: Cumulative count of packets identified as anomalous by ModSecurity
|
|||
Usage Ping was last reported
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: number
|
||||
status: removed
|
||||
milestone_removed: "14.0"
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ description: Cumulative count of packets forwarded by Cilium (Container Network
|
|||
since Usage Ping was last reported
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: number
|
||||
status: removed
|
||||
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/86351
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ description: Cumulative count of packets dropped by Cilium (Container Network Se
|
|||
since Usage Ping was last reported
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: number
|
||||
status: removed
|
||||
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/86351
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ key_path: counts.ingress_modsecurity_logging
|
|||
description: Whether or not ModSecurity is set to logging mode
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: number
|
||||
status: removed
|
||||
milestone_removed: "14.0"
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ key_path: counts.ingress_modsecurity_blocking
|
|||
description: Whether or not ModSecurity is set to blocking mode
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: number
|
||||
status: removed
|
||||
milestone_removed: "14.0"
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ key_path: counts.ingress_modsecurity_disabled
|
|||
description: Whether or not ModSecurity is disabled within Ingress
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: number
|
||||
status: removed
|
||||
milestone_removed: "14.0"
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ key_path: counts.ingress_modsecurity_not_installed
|
|||
description: Whether or not ModSecurity has not been installed into the cluster
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: number
|
||||
status: removed
|
||||
milestone_removed: "14.0"
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ key_path: ingress_modsecurity_enabled
|
|||
description: Whether or not ModSecurity is enabled within Ingress
|
||||
product_section: sec
|
||||
product_stage: protect_stage_was_removed
|
||||
product_group: container_security_group_was_removed
|
||||
product_group: security_policies
|
||||
value_type: boolean
|
||||
status: removed
|
||||
milestone_removed: "14.0"
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ DETAILS:
|
|||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/279039) in GitLab 13.10.
|
||||
> - The legacy key/value pair `{ "<date>" => "<value>" }` was removed from the payload in GitLab 14.0.
|
||||
> `time_to_restore_service` metric was introduced in GitLab 14.9.
|
||||
> - `time_to_restore_service` metric was introduced in GitLab 14.9.
|
||||
|
||||
You can also retrieve [DORA metrics](../../user/analytics/dora_metrics.md) with the [GraphQL API](../../api/graphql/reference/index.md).
|
||||
|
||||
|
|
|
|||
|
[Image diff] Before: 240 KiB -> After: 89 KiB
[Image diff] Before: 150 KiB -> After: 54 KiB
[Image diff] Before: 152 KiB -> After: 59 KiB
[Image diff] Before: 270 KiB -> After: 91 KiB
|
|
@ -85,6 +85,7 @@ All Work Item types share the same pool of predefined widgets and are customized
|
|||
| [WorkItemWidgetStatus](../../../api/graphql/reference/index.md#workitemwidgetstatus) | Status of a work item when type is Requirement, with possible status types being `unverified`, `satisfied`, or `failed` | | |No|
|
||||
| [WorkItemWidgetTestReports](../../../api/graphql/reference/index.md#workitemwidgettestreports) | Test reports associated with a work item | | | |
|
||||
| [WorkItemWidgetWeight](../../../api/graphql/reference/index.md#workitemwidgetweight) | Set weight of a work item | |`Reporter`|No|
|
||||
| WorkItemWidgetLock | Lock/Unlock a work item | |`Reporter`|No|
|
||||
|
||||
#### Widget availability (updating)
|
||||
|
||||
|
|
@ -144,12 +145,12 @@ Task is a special Work Item type. Tasks can be added to issues as child items an
|
|||
|
||||
### Feature flags
|
||||
|
||||
Since this is a large project with numerous moving parts, feature flags are being used to track promotions of available widgets. The table below shows the different feature flags that are being used, and the audience that they are available to.
|
||||
Since this is a large project with numerous moving parts, feature flags are being used to track promotions of available widgets. The table below shows the different feature flags that are being used, and the audience that they are available to.
|
||||
|
||||
| feature flag name | audience |
|
||||
|---|---|
|
||||
| `work_items` | defaulted to on |
|
||||
| `work_items_mvc` | `gitlab-org`, `gitlab-com` |
|
||||
| `work_items_beta` | `gitlab-org`, `gitlab-com` |
|
||||
| `work_items_mvc_2` | `gitlab-org/plan-stage` |
|
||||
|
||||
## Motivation
|
||||
|
|
|
|||
|
|
@ -308,6 +308,19 @@ included in backticks. For example:
|
|||
- `git clone` is a command, so it must be lowercase, while Git is the product,
|
||||
so it must have a capital G.
|
||||
|
||||
### Mermaid
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144328) in GitLab 16.10.
|
||||
|
||||
[Mermaid](https://mermaid.js.org/) builds charts and diagrams from code.
|
||||
|
||||
The `mermaidlint` job runs on merge requests that contain changes to Markdown files.
|
||||
The script (`scripts/lint/check_mermaid.mjs`) returns an error if any Markdown
|
||||
files return a Mermaid syntax error.
|
||||
|
||||
To help debug your Mermaid charts, use the
|
||||
[Mermaid Live Editor](https://mermaid-js.github.io/mermaid-live-editor/edit).
|
||||
|
||||
### Vale
|
||||
|
||||
[Vale](https://vale.sh/) is a grammar, style, and word usage linter for the
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ Twitter. Twitter generates a client ID and secret key for you to use.
|
|||
|
||||
1. Fill in the application details.
|
||||
- **Name**: This can be anything. Consider something like `<Organization>'s GitLab`, `<Your Name>'s GitLab` or
|
||||
something else descriptive.
|
||||
something else descriptive.
|
||||
- **Description**: Create a description.
|
||||
- **Website**: The URL to your GitLab installation. For example, `https://gitlab.example.com`
|
||||
- **Callback URL**: `https://gitlab.example.com/users/auth/twitter/callback`
|
||||
|
|
|
|||
|
|
@ -191,7 +191,7 @@ To enable the Sentry integration:
|
|||
For the SaaS version of Sentry, the hostname is `https://sentry.io`.
|
||||
1. Under **Auth Token**, enter the token you previously generated.
|
||||
1. To test the connection to Sentry and populate the **Project** dropdown list,
|
||||
select **Connect**.
|
||||
select **Connect**.
|
||||
1. From the **Project** list, choose a Sentry project to link to your GitLab project.
|
||||
1. Select **Save changes**.
|
||||
|
||||
|
|
|
|||
|
|
@ -102,7 +102,7 @@ If GitLab is in FIPS mode, use the following:
|
|||
- Use `RSA`, set to **Must be at least 2048 bits**.
|
||||
- Use `ECDSA` (and `ECDSA-SK`), set to **Must be at least 256 bits**.
|
||||
- Set all other key types to **Are forbidden**.
|
||||
`RSA` and `ECDSA` are both approved for FIPS use.
|
||||
`RSA` and `ECDSA` are both approved for FIPS use.
|
||||
- If not running in FIPS mode, you must use `ED25519` and can also use `RSA`:
|
||||
- Set `ED25519` (and `ED25519-SK`) to **Must be at least 256 bits**.
|
||||
- If using `RSA`, set it to **Must be at least 2048 bits**.
|
||||
|
|
|
|||
|
|
@ -275,18 +275,20 @@ The following tables show the prefixes for each type of token where applicable.
|
|||
1. Treat access tokens like passwords and keep them secure.
|
||||
1. When creating a scoped token, consider using the most limited scope possible to reduce the impact of accidentally leaking the token.
|
||||
1. When creating a token, consider setting a token that expires when your task is complete. For example, if performing a one-off import, set the
|
||||
token to expire after a few hours or a day. This reduces the impact of a token that is accidentally leaked because it is useless when it expires.
|
||||
token to expire after a few hours or a day. This reduces the impact of a token that is accidentally leaked because it is useless when it expires.
|
||||
1. If you have set up a demo environment to showcase a project you have been working on and you are recording a video or writing a blog post describing that project, make sure you are not leaking sensitive secrets (for example a personal access token (PAT), feed token or trigger token) during that process. If you have finished the demo, you must revoke all the secrets created during that demo. For more information, see [revoking a PAT](../user/profile/personal_access_tokens.md#revoke-a-personal-access-token).
|
||||
1. Adding access tokens to URLs is a security risk, especially when cloning or adding a remote because Git then writes the URL to its `.git/config` file in plain text. URLs are
|
||||
also generally logged by proxies and application servers, which makes those credentials visible to system administrators. Instead, pass API calls an access token using
|
||||
headers like [the `Private-Token` header](../api/rest/index.md#personalprojectgroup-access-tokens).
|
||||
also generally logged by proxies and application servers, which makes those credentials visible to system administrators. Instead, pass API calls an access token using
|
||||
headers like [the `Private-Token` header](../api/rest/index.md#personalprojectgroup-access-tokens).
|
||||
1. You can also store token using a [Git credential storage](https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage).
|
||||
1. Do not:
|
||||
|
||||
- Store tokens in plain text in your projects.
|
||||
- Include tokens when pasting code, console commands, or log outputs into an issue, MR description, or comment.
|
||||
|
||||
Consider an approach such as [using external secrets in CI](../ci/secrets/index.md).
|
||||
1. Do not log credentials in the console logs or artifacts. Consider [protecting](../ci/variables/index.md#protect-a-cicd-variable) and
|
||||
[masking](../ci/variables/index.md#mask-a-cicd-variable) your credentials.
|
||||
[masking](../ci/variables/index.md#mask-a-cicd-variable) your credentials.
|
||||
1. Review all active access tokens of all types on a regular basis and revoke any that are no longer needed. This includes:
|
||||
- Personal, project, and group access tokens.
|
||||
- Feed tokens.
|
||||
|
|
|
|||
|
|
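The token guidance above (pass tokens in headers, never in URLs) in a minimal Ruby sketch; the host, project ID, and environment variable name are placeholders:

```ruby
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/1')
request = Net::HTTP::Get.new(uri)
# The token travels in a header, so it never ends up in .git/config,
# proxy logs, or shell history the way a token embedded in the URL would.
request['Private-Token'] = ENV.fetch('GITLAB_TOKEN')

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code
```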
@ -120,9 +120,9 @@ Consider an example upstream project, `git@gitlab.com:gitlab-tests/test-git-lfs-
|
|||
so that we can force-push the rewritten repository:
|
||||
|
||||
1. Navigate to your project's **Settings > Repository** and
|
||||
expand **Protected branches**.
|
||||
expand **Protected branches**.
|
||||
1. Scroll down to locate the protected branches and select
|
||||
**Unprotect** the default branch.
|
||||
**Unprotect** the default branch.
|
||||
|
||||
1. Force-push to GitLab:
|
||||
|
||||
|
|
@ -158,10 +158,10 @@ Consider an example upstream project, `git@gitlab.com:gitlab-tests/test-git-lfs-
|
|||
1. [Re-protect the default branch](../../../user/project/protected_branches.md):
|
||||
|
||||
1. Navigate to your project's **Settings > Repository** and
|
||||
expand **Protected branches**.
|
||||
expand **Protected branches**.
|
||||
1. Select the default branch from the **Branch** dropdown list,
|
||||
and set up the
|
||||
**Allowed to push and merge** and **Allowed to merge** rules.
|
||||
and set up the
|
||||
**Allowed to push and merge** and **Allowed to merge** rules.
|
||||
1. Select **Protect**.
|
||||
|
||||
<!-- ## Troubleshooting
|
||||
|
|
|
|||
|
|
@ -133,15 +133,15 @@ a fake email, you must set the user's password without using the email confirmat
|
|||
|
||||
You have created the first test user. Now repeat this for the other users:
|
||||
|
||||
| Name | Username | Email |
|
||||
|:------------------|:-----------------|:-----------------------------|
|
||||
| `Blake Wang` | `blakewang` | `blakewang@example.com` |
|
||||
| `Charlie Devi` | `charliedevi` | `charliedevi@example.com` |
|
||||
| `Devon Ivanov` | `devonivanov` | `devonivanov@example.com` |
|
||||
| `Evan Kim` | `evankim` | `evankim@example.com` |
|
||||
| `Frankie Ali` | `frankieali` | `frankieali@example.com` |
|
||||
| `Grayson Garcia` | `graysongarcia` | `graysongarcia@example.com` |
|
||||
| `Hunter Silva` | `huntersilva` | `huntersilva@example.com` |
|
||||
| Name | Username | Email |
|
||||
|------------------|-----------------|-------|
|
||||
| `Blake Wang` | `blakewang` | `blakewang@example.com` |
|
||||
| `Charlie Devi` | `charliedevi` | `charliedevi@example.com` |
|
||||
| `Devon Ivanov` | `devonivanov` | `devonivanov@example.com` |
|
||||
| `Evan Kim` | `evankim` | `evankim@example.com` |
|
||||
| `Frankie Ali` | `frankieali` | `frankieali@example.com` |
|
||||
| `Grayson Garcia` | `graysongarcia` | `graysongarcia@example.com` |
|
||||
| `Hunter Silva` | `huntersilva` | `huntersilva@example.com` |
|
||||
|
||||
You have created the users for your organization. Next you will add these users
|
||||
to the different groups and subgroups.
|
||||
|
|
@ -162,15 +162,15 @@ First, you will add all the users to the parent group, Development.
|
|||
1. Select **Invite**.
|
||||
1. Repeat this process for the following users:
|
||||
|
||||
| User | Role | Access expiration date |
|
||||
|:----------------|:------------|:------------------------|
|
||||
| Blake Wang | Maintainer | Leave blank |
|
||||
| Charlie Devi | Developer | Leave blank |
|
||||
| Devon Ivanov | Developer | Leave blank |
|
||||
| Evan Kim | Developer | Leave blank |
|
||||
| Frankie Ali | Reporter | Leave blank |
|
||||
| Grayson Garcia | Reporter | Leave blank |
|
||||
| Hunter Silva | Guest | `2025-12-31` |
|
||||
| User | Role | Access expiration date |
|
||||
|----------------|------------|------------------------|
|
||||
| Blake Wang | Maintainer | Leave blank |
|
||||
| Charlie Devi | Developer | Leave blank |
|
||||
| Devon Ivanov | Developer | Leave blank |
|
||||
| Evan Kim | Developer | Leave blank |
|
||||
| Frankie Ali | Reporter | Leave blank |
|
||||
| Grayson Garcia | Reporter | Leave blank |
|
||||
| Hunter Silva | Guest | `2025-12-31` |
|
||||
|
||||
You can invite multiple users at the same time if they have the same role and
|
||||
access expiration date.
|
||||
|
|
@ -277,12 +277,12 @@ You are now going to invite some users to the Engineering subgroup.
|
|||
1. Select **Invite members**.
|
||||
1. Complete the fields for the following members:
|
||||
|
||||
| User | Role | Access expiration date |
|
||||
|:----------------|:------------|:------------------------|
|
||||
| Blake Wang | Maintainer | Leave blank |
|
||||
| Charlie Devi | Developer | Leave blank |
|
||||
| Devon Ivanov | Developer | Leave blank |
|
||||
| Evan Kim | Developer | Leave blank |
|
||||
| User | Role | Access expiration date |
|
||||
|--------------|------------|------------------------|
|
||||
| Blake Wang | Maintainer | Leave blank |
|
||||
| Charlie Devi | Developer | Leave blank |
|
||||
| Devon Ivanov | Developer | Leave blank |
|
||||
| Evan Kim | Developer | Leave blank |
|
||||
|
||||
1. Select **Invite**.
|
||||
|
||||
|
|
@ -315,23 +315,23 @@ included in both nested subgroups due to inherited permissions.
|
|||
Therefore, you will add these users to the appropriate nested subgroup directly
|
||||
rather than to the User Experience subgroup.
|
||||
|
||||
1. 1. On the left sidebar, select **Search or go to** and find the **Development** group.
|
||||
1. On the left sidebar, select **Search or go to** and find the **Development** group.
|
||||
1. Select the **User Experience** subgroup, and then the **UX Design** subgroup.
|
||||
1. On the left sidebar, select **Subgroup information > Members**. You and Alex
|
||||
Smith are currently the only members. These are inherited roles.
|
||||
1. Select **Invite members**.
|
||||
1. Complete the fields and select **Invite** for the following members:
|
||||
|
||||
| User | Role | Access expiration date |
|
||||
|:----------------|:------------|:------------------------|
|
||||
| Frankie Ali | Maintainer | Leave blank |
|
||||
| Hunter Silva | Guest | `2025-12-31` |
|
||||
| User | Role | Access expiration date |
|
||||
|--------------|------------|------------------------|
|
||||
| Frankie Ali | Maintainer | Leave blank |
|
||||
| Hunter Silva | Guest | `2025-12-31` |
|
||||
|
||||
1. Repeat for the **Technical Writing** subgroup:
|
||||
|
||||
| User | Role | Access expiration date |
|
||||
|:----------------|:------------|:------------------------|
|
||||
| Grayson Garcia | Maintainer | Leave blank |
|
||||
| User | Role | Access expiration date |
|
||||
|----------------|------------|------------------------|
|
||||
| Grayson Garcia | Maintainer | Leave blank |
|
||||
|
||||
You have added the users to their appropriate nested subgroups. You decide that
|
||||
Grayson Garcia should be in the **User Experience** subgroup as well.
|
||||
|
|
@ -407,11 +407,11 @@ directly to the project.
|
|||
1. On the left sidebar, select **Manage > Members**.
|
||||
1. Select **Invite members**. Invite the following users:
|
||||
|
||||
| User | Role | Access expiration date |
|
||||
|:----------------|:--------------|:------------------------|
|
||||
| Charlie Devi | Maintainer | Leave blank |
|
||||
| Frankie Ali | Maintainer | Leave blank |
|
||||
| Grayson Garcia | Maintainer | Leave blank |
|
||||
| User | Role | Access expiration date |
|
||||
|----------------|------------|------------------------|
|
||||
| Charlie Devi | Maintainer | Leave blank |
|
||||
| Frankie Ali | Maintainer | Leave blank |
|
||||
| Grayson Garcia | Maintainer | Leave blank |
|
||||
|
||||
1. Select **Invite**.
|
||||
1. Because you added these users directly to the project, you can change
|
||||
|
|
|
|||
|
|
@ -124,10 +124,10 @@ to your instance and then upgrade it for any relevant features you're using.
|
|||
- About PostgreSQL:
|
||||
1. On the left sidebar, at the bottom, select **Admin Area**..
|
||||
1. Look for the version of PostgreSQL you are using.
|
||||
If [a PostgreSQL upgrade is needed](../administration/package_information/postgresql_versions.md),
|
||||
account for the relevant
|
||||
[packaged](https://docs.gitlab.com/omnibus/settings/database.html#upgrade-packaged-postgresql-server)
|
||||
or [non-packaged](https://docs.gitlab.com/omnibus/settings/database.html#upgrade-a-non-packaged-postgresql-database) steps.
|
||||
If [a PostgreSQL upgrade is needed](../administration/package_information/postgresql_versions.md),
|
||||
account for the relevant
|
||||
[packaged](https://docs.gitlab.com/omnibus/settings/database.html#upgrade-packaged-postgresql-server)
|
||||
or [non-packaged](https://docs.gitlab.com/omnibus/settings/database.html#upgrade-a-non-packaged-postgresql-database) steps.
|
||||
|
||||
### Additional features
|
||||
|
||||
|
|
|
|||
|
|
@ -15,8 +15,8 @@ without having to take your GitLab instance offline. However, for this to work
|
|||
there are the following requirements:
|
||||
|
||||
- You can only upgrade one minor release at a time. So from 13.1 to 13.2, not to
|
||||
13.3. If you skip releases, database modifications may be run in the wrong
|
||||
sequence [and leave the database schema in a broken state](https://gitlab.com/gitlab-org/gitlab/-/issues/321542).
|
||||
13.3. If you skip releases, database modifications may be run in the wrong
|
||||
sequence [and leave the database schema in a broken state](https://gitlab.com/gitlab-org/gitlab/-/issues/321542).
|
||||
- You have to use [post-deployment migrations](../development/database/post_deployment_migrations.md).
|
||||
- You are using PostgreSQL. Starting from GitLab 12.1, MySQL is not supported.
|
||||
- You have set up a multi-node GitLab instance. Cloud Native Hybrid installations do [not support zero-downtime upgrades](../administration/reference_architectures/index.md#zero-downtime-upgrades).
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ Prerequisites:
|
|||
1. Optional. Filter results:
|
||||
1. From the **Projects** dropdown list, select a project.
|
||||
1. To filter results by author, milestone, or label,
|
||||
select **Filter results...** and enter a value.
|
||||
select **Filter results...** and enter a value.
|
||||
1. To adjust the date range:
|
||||
- In the **From** field, select a start date.
|
||||
- In the **To** field, select an end date.
|
||||
|
|
|
|||
|
|
@ -76,7 +76,7 @@ To enable coverage-guided fuzz testing, edit `.gitlab-ci.yml`:
|
|||
1. Add the `fuzz` stage to the list of stages.
|
||||
|
||||
1. If your application is not written in Go, [provide a Docker image](../../../ci/yaml/index.md#image) using the matching fuzzing
|
||||
engine. For example:
|
||||
engine. For example:
|
||||
|
||||
```yaml
|
||||
image: python:latest
|
||||
|
|
|
|||
|
|
@ -38,11 +38,11 @@ After you've gotten familiar with how scanning works, you can then choose to:
|
|||
to identify any leaked secrets and vulnerable packages in that project.
|
||||
- Security scanners run in your project's [CI/CD pipelines](../../ci/pipelines/index.md). Creating a merge request to update your [`.gitlab-ci.yml`](../../ci/index.md#the-gitlab-ciyml-file) helps you check how the scanners work with your project before they start running in every pipeline. In the merge request, you can change relevant [Secret Detection settings](secret_detection/index.md#configure-scan-settings) or [Dependency Scanning settings](dependency_scanning/index.md#available-cicd-variables) to accommodate your project's layout or configuration. For example, you might choose to exclude a directory of third-party code from scanning.
|
||||
- After you merge this MR to your [default branch](../project/repository/branches/default.md), the system creates a baseline scan. This scan identifies which vulnerabilities already exist on the default branch so [merge requests](../project/merge_requests/index.md) can highlight only newly-introduced problems. Without a baseline scan, merge requests display every
|
||||
vulnerability in the branch, even if the vulnerability already exists on the default branch.
|
||||
vulnerability in the branch, even if the vulnerability already exists on the default branch.
|
||||
1. Let your team get comfortable with [viewing security findings in merge requests](index.md#view-security-scan-information) and the [vulnerability report](vulnerability_report/index.md).
|
||||
1. Establish a vulnerability triage workflow.
|
||||
- Consider creating [labels](../project/labels.md) and [issue boards](../project/issue_board.md) to
|
||||
help manage issues created from vulnerabilities. Issue boards allow all stakeholders to have a
|
||||
help manage issues created from vulnerabilities. Issue boards allow all stakeholders to have a
|
||||
common view of all issues and track remediation progress.
|
||||
1. Monitor the [Security Dashboard](security_dashboard/index.md) trends to gauge success in remediating existing vulnerabilities and preventing the introduction of new ones.
|
||||
1. Enforce scheduled security scanning jobs by using a [scan execution policy](policies/scan-execution-policies.md).
|
||||
|
|
|
|||
|
|
@ -551,7 +551,7 @@ GitLab provides two methods of accomplishing this, each with advantages and disa
|
|||
|
||||
- Scan execution enforcement is required for DAST which uses a DAST site or scan profile.
|
||||
- Scan execution enforcement is required for SAST, SAST IaC, Secret Detection, Dependency Scanning, or Container Scanning with project-specific
|
||||
variable customizations. To accomplish this, users must create a separate security policy per project.
|
||||
variable customizations. To accomplish this, users must create a separate security policy per project.
|
||||
- Scans are required to run on a regular, scheduled cadence.
|
||||
|
||||
- Either solution can be used equally well when:
|
||||
|
|
|
|||
|
|
@ -230,11 +230,11 @@ A table displays the member's:
|
|||
|
||||
- **Account** name and username.
|
||||
- **Source** of their [membership](../project/members/index.md#membership-types).
|
||||
For transparency, GitLab displays all membership sources of group members.
|
||||
Members who have multiple membership sources are displayed and counted as separate members.
|
||||
For example, if a member has been added to the group both directly and through inheritance,
|
||||
the member is displayed twice in the **Members** table, with different sources,
|
||||
and is counted as two individual members of the group.
|
||||
For transparency, GitLab displays all membership sources of group members.
|
||||
Members who have multiple membership sources are displayed and counted as separate members.
|
||||
For example, if a member has been added to the group both directly and through inheritance,
|
||||
the member is displayed twice in the **Members** table, with different sources,
|
||||
and is counted as two individual members of the group.
|
||||
- [**Max role**](../project/members/index.md#which-roles-you-can-assign) in the group.
|
||||
- **Expiration** date of their group membership.
|
||||
- **Activity** related to their account.
|
||||
|
|
@ -323,8 +323,8 @@ Prerequisites:
|
|||
|
||||
- GitLab username, the user is added to the member list.
|
||||
- Email address, the user receives an email invitation and is prompted to create an account.
|
||||
If the invitation is not accepted, GitLab sends reminder emails two, five, and ten days later.
|
||||
Unaccepted invites are automatically deleted after 90 days.
|
||||
If the invitation is not accepted, GitLab sends reminder emails two, five, and ten days later.
|
||||
Unaccepted invites are automatically deleted after 90 days.
|
||||
|
||||
Members that are not automatically added are displayed on the **Invited** tab.
|
||||
This tab includes users who:
|
||||
|
|
|
|||
|
|
@ -260,7 +260,7 @@ After you set up your identity provider to work with GitLab, you must configure
|
|||
1. Optional. Select:
|
||||
- **Enforce SSO-only authentication for web activity for this group**.
|
||||
- **Enforce SSO-only authentication for Git activity for this group**.
|
||||
For more information, see the [SSO enforcement documentation](#sso-enforcement).
|
||||
For more information, see the [SSO enforcement documentation](#sso-enforcement).
|
||||
1. Select **Save changes**.
|
||||
|
||||
NOTE:
|
||||
|
|
|
|||
|
|
@ -196,8 +196,10 @@ this:
|
|||
1. Locate your SCIM token.
|
||||
1. Use the API to [get a single SCIM provisioned user](/ee/development/internal_api/index.md#get-a-single-scim-provisioned-user).
|
||||
1. Check the returned information to make sure that:
|
||||
|
||||
- The user's identifier (`id`) and email match what your identity provider is sending.
|
||||
- `active` is set to `false`.
|
||||
|
||||
If any of this information does not match, [contact GitLab Support](https://support.gitlab.com/).
|
||||
1. Use the API to [update the SCIM provisioned user's `active` value to `true`](/ee/development/internal_api/index.md#update-a-single-scim-provisioned-user).
|
||||
1. If the update returns a status code `204`, have the user attempt to sign in
|
||||
|
|
|
|||
|
|
@ -1733,15 +1733,15 @@ When creating tables:
|
|||
- The first line contains the headers, separated by "pipes" (`|`).
|
||||
- The second line separates the headers from the cells.
|
||||
- The cells can contain only empty spaces, hyphens, and
|
||||
(optionally) colons for horizontal alignment.
|
||||
(optionally) colons for horizontal alignment.
|
||||
- Each cell must contain at least one hyphen, but adding more hyphens to a
|
||||
cell does not change the cell's rendering.
|
||||
cell does not change the cell's rendering.
|
||||
- Any content other than hyphens, whitespace, or colons is not allowed
|
||||
- The third, and any following lines, contain the cell values.
|
||||
- You **can't** have cells separated over many lines in the Markdown, they must be kept to single lines,
|
||||
but they can be very long. You can also include HTML `<br>` tags to force newlines if needed.
|
||||
but they can be very long. You can also include HTML `<br>` tags to force newlines if needed.
|
||||
- The cell sizes **don't** have to match each other. They are flexible, but must be separated
|
||||
by pipes (`|`).
|
||||
by pipes (`|`).
|
||||
- You **can** have blank cells.
|
||||
- Column widths are calculated dynamically based on the content of the cells.
|
||||
- To use the pipe character (`|`) in the text and not as table delimiter, you must escape it with a backslash (`\|`).
|
||||
|
|
|
|||
|
|
@ -485,10 +485,10 @@ system note in the OKR's comments, for example:
|
|||
|
||||
## Lock discussion
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/398649) in GitLab 16.9 [with a flag](../administration/feature_flags.md) named `work_items_mvc`. Disabled by default.
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/398649) in GitLab 16.9 [with a flag](../administration/feature_flags.md) named `work_items_beta`. Disabled by default.
|
||||
|
||||
FLAG:
|
||||
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../administration/feature_flags.md) named `work_items_mvc`.
|
||||
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../administration/feature_flags.md) named `work_items_beta`.
|
||||
On GitLab.com, this feature is not available.
|
||||
This feature is not ready for production use.
|
||||
|
||||
|
|
|
|||
|
|
@ -143,12 +143,12 @@ see [Container registry visibility permissions](#container-registry-visibility-p
|
|||
1. Under **Container Registry**, select an option from the dropdown list:
|
||||
|
||||
- **Everyone With Access** (Default): The container registry is visible to everyone with access
|
||||
to the project. If the project is public, the container registry is also public. If the project
|
||||
is internal or private, the container registry is also internal or private.
|
||||
to the project. If the project is public, the container registry is also public. If the project
|
||||
is internal or private, the container registry is also internal or private.
|
||||
|
||||
- **Only Project Members**: The container registry is visible only to project members with
|
||||
at least the Reporter role. This visibility is similar to the behavior of a private project with Container
|
||||
Registry visibility set to **Everyone With Access**.
|
||||
at least the Reporter role. This visibility is similar to the behavior of a private project with Container
|
||||
Registry visibility set to **Everyone With Access**.
|
||||
|
||||
1. Select **Save changes**.
|
||||
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@ Prerequisites:
|
|||
- Your namespace must not:
|
||||
- Contain a project with [Container Registry](../packages/container_registry/index.md) tags.
|
||||
- Have a project that hosts [GitLab Pages](../project/pages/index.md). For more information,
|
||||
see [changing your username in the GitLab Team Handbook](https://handbook.gitlab.com/handbook/tools-and-tips/#change-your-username-at-gitlabcom).
|
||||
see [changing your username in the GitLab Team Handbook](https://handbook.gitlab.com/handbook/tools-and-tips/#change-your-username-at-gitlabcom).
|
||||
- Your username must be between 2 and 255 characters in length, and must not:
|
||||
- Contain special characters or emoji.
|
||||
- End with `.<reserved file extension>`, for example `jon.png`. However, `jonpng` is valid.
|
||||
|
|
|
|||
|
|
@ -29,8 +29,8 @@ A deploy key has a defined scope when it is created:
|
|||
|
||||
- **Project deploy key:** Access is limited to the selected project.
|
||||
- **Public deploy key:** Access can be granted to _any_ project in a GitLab instance. Access to each
|
||||
project must be [granted](#grant-project-access-to-a-public-deploy-key) by a user with at least
|
||||
the Maintainer role.
|
||||
project must be [granted](#grant-project-access-to-a-public-deploy-key) by a user with at least
|
||||
the Maintainer role.
|
||||
|
||||
You cannot change a deploy key's scope after creating it.
|
||||
|
||||
|
|
|
|||
|
|
@ -92,8 +92,8 @@ To add a user to a project:
|
|||
|
||||
- GitLab username, they are added to the members list.
|
||||
- Email address, an invitation is sent to their email address, and they are prompted to create an account.
|
||||
If the invitation is not accepted, GitLab sends reminder emails two, five, and ten days later.
|
||||
Unaccepted invites are automatically deleted after 90 days.
|
||||
If the invitation is not accepted, GitLab sends reminder emails two, five, and ten days later.
|
||||
Unaccepted invites are automatically deleted after 90 days.
|
||||
|
||||
### Which roles you can assign
|
||||
|
||||
|
|
|
|||
|
|
@@ -269,9 +269,9 @@ meet these requirements.

1. Add the PEM certificate to its corresponding field.
1. If your certificate is missing its intermediate, copy
   and paste the root certificate (usually available from your CA website)
   and paste it in the [same field as your PEM certificate](https://about.gitlab.com/blog/2017/02/07/setting-up-gitlab-pages-with-cloudflare-certificates/),
   just jumping a line between them.
1. Copy your private key and paste it in the last field.

**Do not** open certificates or encryption keys in

@@ -38,10 +38,10 @@ You must have a [blank project](../../index.md#create-a-blank-project) in GitLab
Create three files in the root (top-level) directory:

- `.gitlab-ci.yml`: A YAML file that contains the commands you want to run.
  For now, leave the file's contents blank.

- `index.html`: An HTML file you can populate with whatever HTML content
  you'd like, for example:

  ```html
  <html>

@@ -295,6 +295,5 @@ However, there are some minor differences:

Given a request to `/old`:

- Netlify redirects to `/new/:placeholder` (with a
  literal `:placeholder`).
- Netlify redirects to `/new/:placeholder` (with a literal `:placeholder`).
- GitLab redirects to `/new/`.

@@ -114,15 +114,15 @@ at the [instance level](#instance-level-default-branch-protection) and
[group level](#group-level-default-branch-protection) with one of the following options:

- **Fully protected** - Default value. Developers cannot push new commits, but maintainers can.
  No one can force push.
- **Fully protected after initial push** - Developers can push the initial commit
  to a repository, but none afterward. Maintainers can always push. No one can force push.
- **Protected against pushes** - Developers cannot push new commits, but are
  allowed to accept merge requests to the branch. Maintainers can push to the branch.
- **Partially protected** - Both developers and maintainers can push new commits,
  but cannot force push.
- **Not protected** - Both developers and maintainers can push new commits
  and force push.

WARNING:
Unless **Fully protected** is chosen, a malicious developer could attempt to steal your sensitive data. For example, a malicious `.gitlab-ci.yml` file could be committed to a protected branch and later, if a pipeline is run against that branch, result in exfiltration of group CI/CD variables.

@@ -99,7 +99,7 @@ When using Code Suggestions, [code review best practice](../../../../development
## Progressive enhancement

This feature is designed as a progressive enhancement to developer IDEs.
Code Suggestions offer a completion if a suitable recommendation is provided to the user in a timely matter.
Code Suggestions offer a completion if a suitable recommendation is provided to the user in a timely manner.
In the event of a connection issue or model inference failure, the feature gracefully degrades.
Code Suggestions do not prevent you from writing code in your IDE.

@@ -44,7 +44,7 @@ xray:
  image: registry.gitlab.com/gitlab-org/code-creation/repository-x-ray:latest
  allow_failure: true
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
  variables:
    OUTPUT_DIR: reports
  script:

@@ -60,16 +60,16 @@ to a branch in the repository. When you use the command line, you can commit mul
In GitLab, you can add keywords to the commit
message to perform one of the following actions:

- **Trigger a GitLab CI/CD pipeline:**
  If the project is configured with [GitLab CI/CD](../../../ci/index.md),
  you trigger a pipeline per push, not per commit.
- **Skip pipelines:**
  Add the [`ci skip`](../../../ci/pipelines/index.md#skip-a-pipeline) keyword to
  your commit message to make GitLab CI/CD skip the pipeline.
- **Cross-link issues and merge requests:**
  Use [cross-linking](../issues/crosslinking_issues.md#from-commit-messages)
  to keep track of related parts of your workflow.
  If you mention an issue or a merge request in a commit message, they are displayed
  on their respective thread.
- **Cherry-pick a commit:**
  In GitLab, you can
  [cherry-pick a commit](../merge_requests/cherry_pick_changes.md#cherry-pick-a-single-commit)

@@ -317,9 +317,9 @@ To troubleshoot this:

1. Find the correct email address to forward emails to. Either:
   - Note the address from the verification result email that all project owners and the user that
     triggered the verification process receive.
   - Copy the address from the **Service Desk email address to forward emails to** input in the
     custom email setup form.
1. Forward all emails to the custom email address to the correct target email address.

### Enable or disable the custom email address

@@ -101,7 +101,7 @@ To upload a file in the Web IDE:
To create a new directory:

- On the left **Explorer** sidebar, in the upper right,
  select **New Folder** (**{folder-new}**).

1. Right-click the directory and select **Upload**.
1. Select the file you want to upload.

@@ -492,10 +492,10 @@ system note in the task's comments, for example:

## Lock discussion

> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/398649) in GitLab 16.9 [with a flag](../administration/feature_flags.md) named `work_items_mvc`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/398649) in GitLab 16.9 [with a flag](../administration/feature_flags.md) named `work_items_beta`. Disabled by default.

FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../administration/feature_flags.md) named `work_items_mvc`.
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../administration/feature_flags.md) named `work_items_beta`.
On GitLab.com, this feature is not available.
This feature is not ready for production use.

@@ -29,12 +29,12 @@ which you can customize to meet the specific needs of each project.
- Ensure autoscaling for the Kubernetes cluster is enabled.
- In the Kubernetes cluster:
  - Verify that a [default storage class](https://kubernetes.io/docs/concepts/storage/storage-classes/)
    is defined so that volumes can be dynamically provisioned for each workspace.
  - Install an Ingress controller of your choice (for example, `ingress-nginx`) and make
    that controller accessible over a domain.
    - In development environments, add an entry to the `/etc/hosts` file or update your DNS records.
    - In production environments, point `*.<workspaces.example.dev>` and `<workspaces.example.dev>`
      to the load balancer exposed by the Ingress controller.
- [Install `gitlab-workspaces-proxy`](https://gitlab.com/gitlab-org/remote-development/gitlab-workspaces-proxy#installation-instructions).
- [Install](../clusters/agent/install/index.md) and [configure](gitlab_agent_configuration.md) the GitLab agent.
- You must have at least the Developer role in the root group.

@@ -33,6 +33,11 @@ pre-push:
      files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD
      glob: '*.{yml,yaml}{,.*}'
      run: scripts/lint-yaml.sh {files}
    mermaidlint:
      tags: documentation style,backend style,frontend style
      files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD
      glob: '{app,lib,ee,spec,doc,scripts}/**/*.md'
      run: scripts/lint/check_mermaid.mjs {files}
    stylelint:
      tags: stylesheet css style
      files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD

@@ -31,7 +31,7 @@ module ClickHouse

    def each_batch(column: :id, of: 10_000)
      min, max = min_max(column)
      return if min.nil? || max == 0
      return if min.nil? || max.nil? || max == 0

      loop do
        break if min > max

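A minimal sketch (not the GitLab implementation) of the guard this hunk adds: if the `MAX` bound comes back `nil`, the later `min > max` comparison would raise an `ArgumentError`, so the method now bails out early. The range slicing below is only illustrative.

```ruby
def each_batch_range(min, max, of: 10_000)
  return if min.nil? || max.nil? || max == 0 # both bounds must be present

  while min <= max
    yield(min..[min + of - 1, max].min)
    min += of
  end
end

each_batch_range(1, nil) { |r| p r }          # returns immediately, nothing raised
each_batch_range(1, 25_000, of: 10_000) { |r| p r }
# prints 1..10000, 10001..20000, 20001..25000
```
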
@@ -8,6 +8,7 @@ module Gitlab
    # an administrator must have explicitly enabled admin-mode
    # e.g. on web access require re-authentication
    class CurrentUserMode
      include Gitlab::Utils::StrongMemoize
      NotRequestedError = Class.new(StandardError)

      # RequestStore entries

@@ -85,8 +86,9 @@ module Gitlab
        end
      end

      def initialize(user)
      def initialize(user, session = Gitlab::Session.current)
        @user = user
        @session = session
      end

      def admin_mode?

@@ -138,6 +140,11 @@ module Gitlab
        current_session_data[ADMIN_MODE_REQUESTED_TIME_KEY] = Time.now
      end

      def current_session_data
        Gitlab::NamespacedSessionStore.new(SESSION_STORE_KEY, @session)
      end
      strong_memoize_attr :current_session_data

      private

      attr_reader :user

@@ -152,10 +159,6 @@ module Gitlab
        @admin_mode_requested_rs_key ||= { res: :current_user_mode, user: user.id, method: :admin_mode_requested? }
      end

      def current_session_data
        @current_session ||= Gitlab::NamespacedSessionStore.new(SESSION_STORE_KEY)
      end

      def session_with_admin_mode?
        return true if bypass_session?

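A hedged usage sketch of the new constructor signature: the second argument defaults to `Gitlab::Session.current`, so existing callers are unaffected, while tests (see the spec hunk further down) can inject a session explicitly. The `custom_session` hash here is hypothetical.

```ruby
user = User.find_by(username: 'root')                    # illustrative lookup
custom_session = { 'key' => 'value' }                    # hypothetical session hash

# Existing behaviour: session picked up implicitly from Gitlab::Session.current
Gitlab::Auth::CurrentUserMode.new(user).admin_mode?

# New: session injected explicitly
Gitlab::Auth::CurrentUserMode.new(user, custom_session).admin_mode?
```
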
@@ -5,8 +5,7 @@ module Gitlab
    module Metrics
      module Aggregates
        UNION_OF_AGGREGATED_METRICS = 'OR'
        INTERSECTION_OF_AGGREGATED_METRICS = 'AND'
        ALLOWED_METRICS_AGGREGATIONS = [UNION_OF_AGGREGATED_METRICS, INTERSECTION_OF_AGGREGATED_METRICS].freeze
        ALLOWED_METRICS_AGGREGATIONS = [UNION_OF_AGGREGATED_METRICS].freeze
        AggregatedMetricError = Class.new(StandardError)
        UnknownAggregationOperator = Class.new(AggregatedMetricError)
        UnknownAggregationSource = Class.new(AggregatedMetricError)

@@ -17,11 +17,8 @@ module Gitlab
          events = select_defined_events(aggregation[:events], aggregation[:source])
          property_name = aggregation[:attribute]

          if aggregation[:operator] == UNION_OF_AGGREGATED_METRICS
            source.calculate_metrics_union(**time_constraints(time_frame).merge(metric_names: events, property_name: property_name, recorded_at: recorded_at))
          else
            source.calculate_metrics_intersections(**time_constraints(time_frame).merge(metric_names: events, property_name: property_name, recorded_at: recorded_at))
          end
          source.calculate_metrics_union(**time_constraints(time_frame)
            .merge(metric_names: events, property_name: property_name, recorded_at: recorded_at))
        end
      rescue Gitlab::UsageDataCounters::HLLRedisCounter::EventError, AggregatedMetricError => error
        failure(error)

@@ -1,74 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module Usage
    module Metrics
      module Aggregates
        module Sources
          module Calculations
            module Intersection
              def calculate_metrics_intersections(metric_names:, start_date:, end_date:, recorded_at:, property_name:, subset_powers_cache: Hash.new({}))
                # calculate power of intersection of all given metrics from inclusion exclusion principle
                # |A + B + C| = (|A| + |B| + |C|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C|) =>
                # |A & B & C| = - (|A| + |B| + |C|) + (|A & B| + |A & C| + .. + |C & D|) + |A + B + C|
                # |A + B + C + D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - |A & B & C & D| =>
                # |A & B & C & D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - |A + B + C + D|

                # calculate each components of equation except for the last one |A & B & C & D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - ...
                subset_powers_data = subsets_intersection_powers(metric_names, start_date, end_date, recorded_at, property_name, subset_powers_cache)

                # calculate last component of the equation |A & B & C & D| = .... - |A + B + C + D|
                power_of_union_of_all_metrics = subset_powers_cache[metric_names.size][metric_names.join('_+_')] ||= \
                  calculate_metrics_union(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at, property_name: property_name)

                # in order to determine if part of equation (|A & B & C|, |A & B & C & D|), that represents the intersection that we need to calculate,
                # is positive or negative in particular equation we need to determine if number of subsets is even or odd. Please take a look at two examples below
                # |A + B + C| = (|A| + |B| + |C|) - (|A & B| + |A & C| + .. + |C & D|) + |A & B & C| =>
                # |A & B & C| = - (|A| + |B| + |C|) + (|A & B| + |A & C| + .. + |C & D|) + |A + B + C|
                # |A + B + C + D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - |A & B & C & D| =>
                # |A & B & C & D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - |A + B + C + D|
                subset_powers_size_even = subset_powers_data.size.even?

                # sum all components of equation except for the last one |A & B & C & D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - ... =>
                sum_of_all_subset_powers = sum_subset_powers(subset_powers_data, subset_powers_size_even)

                # add last component of the equation |A & B & C & D| = sum_of_all_subset_powers - |A + B + C + D|
                sum_of_all_subset_powers + (subset_powers_size_even ? power_of_union_of_all_metrics : -power_of_union_of_all_metrics)
              end

              private

              def subsets_intersection_powers(metric_names, start_date, end_date, recorded_at, property_name, subset_powers_cache)
                subset_sizes = (1...metric_names.size)

                subset_sizes.map do |subset_size|
                  if subset_size > 1
                    # calculate sum of powers of intersection between each subset (with given size) of metrics: #|A + B + C + D| = ... - (|A & B| + |A & C| + .. + |C & D|)
                    metric_names.combination(subset_size).sum do |metrics_subset|
                      subset_powers_cache[subset_size][metrics_subset.join('_&_')] ||=
                        calculate_metrics_intersections(metric_names: metrics_subset, start_date: start_date, end_date: end_date, recorded_at: recorded_at, subset_powers_cache: subset_powers_cache, property_name: property_name)
                    end
                  else
                    # calculate sum of powers of each set (metric) alone #|A + B + C + D| = (|A| + |B| + |C| + |D|) - ...
                    metric_names.sum do |metric|
                      subset_powers_cache[subset_size][metric] ||= \
                        calculate_metrics_union(metric_names: metric, start_date: start_date, end_date: end_date, recorded_at: recorded_at, property_name: property_name)
                    end
                  end
                end
              end

              def sum_subset_powers(subset_powers_data, subset_powers_size_even)
                sum_without_sign = subset_powers_data.to_enum.with_index.sum do |value, index|
                  (index + 1).odd? ? value : -value
                end

                (subset_powers_size_even ? -1 : 1) * sum_without_sign
              end
            end
          end
        end
      end
    end
  end
end

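For reference, the removed module computed intersections from unions via the inclusion-exclusion principle spelled out in its comments; in standard set notation the three-set case it describes is:

```math
|A \cap B \cap C| = |A \cup B \cup C| - \bigl(|A| + |B| + |C|\bigr) + \bigl(|A \cap B| + |A \cap C| + |B \cap C|\bigr)
```
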
@@ -0,0 +1,48 @@
# frozen_string_literal: true

module QA
  module Page
    module Explore
      class CiCdCatalog < Page::Base
        view 'app/assets/javascripts/ci/catalog/components/list/ci_resources_list.vue' do
          element 'catalog-list-container'
        end

        view 'app/assets/javascripts/ci/catalog/components/list/ci_resources_list_item.vue' do
          element 'catalog-resource-item'
        end

        view 'app/assets/javascripts/ci/catalog/components/list/catalog_search.vue' do
          element 'catalog-search-bar', required: true
          element 'catalog-sorting-option-button', required: true
        end

        def sort_by_created_at
          switch_catalog_sorting_option('CREATED')
        end

        def sort_in_ascending_order
          # Switching from descending to ascending
          click_element('sort-highest-icon')
          wait_for_requests
        end

        def get_top_project_names(count)
          all_elements('ci-resource-link', minimum: 1).first(count).map(&:text)
        end

        def get_bottom_project_names(count)
          all_elements('ci-resource-link', minimum: 1).last(count).map(&:text)
        end

        private

        # Current acceptable options: 'CREATED', 'RELEASED'
        def switch_catalog_sorting_option(option)
          click_element('catalog-sorting-option-button')
          find("[data-testid='listbox-item-#{option}']").click
        end
      end
    end
  end
end

@@ -8,6 +8,7 @@ module QA
      # since tablets have the regular top navigation bar
      include SubMenus::CreateNewMenu
      include SubMenus::SuperSidebar::GlobalSearchModal
      include SubMenus::Explore

      view 'app/assets/javascripts/super_sidebar/components/super_sidebar.vue' do
        element 'super-sidebar', required: true

@@ -10,10 +10,20 @@ module QA
          element 'project-features-save-button'
        end

        view 'app/assets/javascripts/pages/projects/shared/permissions/components/ci_catalog_settings.vue' do
          element 'catalog-resource-toggle'
        end

        def set_project_visibility(visibility)
          select_element('project-visibility-dropdown', visibility)
          click_element 'project-features-save-button'
        end

        def enable_ci_cd_catalog_resource
          within_element('catalog-resource-toggle') do
            find('.gl-toggle').click
          end
        end
      end
    end
  end
end

@@ -0,0 +1,25 @@
# frozen_string_literal: true

module QA
  module Page
    module SubMenus
      module Explore
        extend QA::Page::PageConcern

        def self.prepended(base)
          super

          base.class_eval do
            view 'app/assets/javascripts/super_sidebar/components/nav_item.vue' do
              element 'nav-item-link'
            end
          end
        end

        def go_to_ci_cd_catalog
          click_element('nav-item-link', submenu_item: 'CI/CD Catalog')
        end
      end
    end
  end
end

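Taken together with the `CiCdCatalog` page object above, a minimal sketch of how these QA page objects compose; the same calls appear in the end-to-end spec that follows.

```ruby
# Navigate to the CI/CD Catalog through the super sidebar, then drive the catalog page.
Page::Main::Menu.perform do |main|
  main.go_to_explore
  main.go_to_ci_cd_catalog
end

Page::Explore::CiCdCatalog.perform do |catalog|
  catalog.sort_by_created_at          # switch sort key to CREATED
  catalog.sort_in_ascending_order     # flip sort direction
  catalog.get_top_project_names(3)    # read the first three resource names
end
```
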
@@ -0,0 +1,131 @@
# frozen_string_literal: true

module QA
  RSpec.describe 'Verify', :skip_live_env, product_group: :pipeline_authoring do
    describe 'CI catalog' do
      let(:project_count) { 3 }

      let(:catalog_project_list) do
        create_list(
          :project,
          project_count,
          :with_readme,
          name: 'project-for-catalog',
          description: 'This is a catalog project.'
        )
      end

      let(:tag) { '1.0.0' }
      let(:test_project_names) { catalog_project_list.map(&:name) }

      shared_examples 'descending order by default' do |testcase|
        it 'displays from last to first', testcase: testcase do
          Page::Explore::CiCdCatalog.perform do |catalog|
            expect(top_projects_from_ui(catalog)).to eql(test_project_names.reverse)
          end
        end
      end

      shared_examples 'ascending order' do |testcase|
        it 'displays from first to last', testcase: testcase do
          Page::Explore::CiCdCatalog.perform do |catalog|
            catalog.sort_in_ascending_order
            expect(bottom_projects_from_ui(catalog)).to eql(test_project_names)
          end
        end
      end

      context 'when sorting' do
        before do
          Flow::Login.sign_in

          catalog_project_list.each do |project|
            enable_catalog_resource_feature(project)
            setup_component(project)
            create_release(project)
          end

          Page::Main::Menu.perform do |main|
            main.go_to_explore
            main.go_to_ci_cd_catalog
          end
        end

        context 'with released at' do
          it_behaves_like 'descending order by default',
            'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/441478'

          it_behaves_like 'ascending order',
            'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/441477'
        end

        context 'with created at' do
          before do
            Page::Explore::CiCdCatalog.perform(&:sort_by_created_at)
          end

          it_behaves_like 'descending order by default',
            'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/441479'

          it_behaves_like 'ascending order',
            'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/441475'
        end
      end

      private

      def enable_catalog_resource_feature(project)
        project.visit!

        Page::Project::Menu.perform(&:go_to_general_settings)
        Page::Project::Settings::Main.perform do |settings|
          settings.expand_visibility_project_features_permissions(&:enable_ci_cd_catalog_resource)
        end
      end

      def setup_component(project)
        create(:commit, project: project, commit_message: 'Add .gitlab-ci.yml and component', actions: [
          {
            action: 'create',
            file_path: '.gitlab-ci.yml',
            content: <<~YAML
              create-release:
                stage: deploy
                script: echo "Creating release $CI_COMMIT_TAG"
                rules:
                  - if: $CI_COMMIT_TAG
                release:
                  tag_name: $CI_COMMIT_TAG
                  description: "Release $CI_COMMIT_TAG of components in $CI_PROJECT_PATH"
            YAML
          },
          {
            action: 'create',
            file_path: 'templates/new_component.yml',
            content: <<~YAML
              spec:
                inputs:
                  scanner-output:
                    default: json
              ---
              my-scanner:
                script: my-scan --output $[[ inputs.scanner-output ]]
            YAML
          }
        ])
      end

      def create_release(project)
        project.create_release(tag)
      end

      def top_projects_from_ui(page_object)
        page_object.get_top_project_names(project_count)
      end

      def bottom_projects_from_ui(page_object)
        page_object.get_bottom_project_names(project_count)
      end
    end
  end
end

@@ -0,0 +1,77 @@
#!/usr/bin/env node

// Lint mermaid code in markdown files.
// Usage: scripts/lint/check_mermaid.mjs [files ...]

import fs from 'node:fs';
import glob from 'glob';
import mermaid from 'mermaid';
import DOMPurify from 'dompurify';
import { JSDOM } from 'jsdom';

const jsdom = new JSDOM('...', {
  pretendToBeVisual: true,
});
global.document = jsdom;
global.window = jsdom.window;
global.Option = window.Option;

// Workaround to make DOMPurify not fail.
// See https://github.com/mermaid-js/mermaid/issues/5204
DOMPurify.addHook = () => {};
DOMPurify.sanitize = (x) => x;

const defaultGlob = "{app,lib,ee,spec,doc,scripts}/**/*.md";
const mermaidMatch = /```mermaid(.*?)```/gms;

const argv = process.argv.length > 2 ? process.argv.slice(2) : [defaultGlob];
const mdFiles = argv.flatMap((arg) => glob.sync(arg));

console.log(`Checking ${mdFiles.length} markdown files...`);

// Mimicking app/assets/javascripts/lib/mermaid.js
mermaid.initialize({
  // mermaid core options
  mermaid: {
    startOnLoad: false,
  },
  // mermaidAPI options
  theme: 'neutral',
  flowchart: {
    useMaxWidth: true,
    htmlLabels: true,
  },
  secure: ['secure', 'securityLevel', 'startOnLoad', 'maxTextSize', 'htmlLabels'],
  securityLevel: 'strict',
});

let errors = 0;

await Promise.all(
  mdFiles.map((path) => {
    const data = fs.readFileSync(path, 'utf8');

    const matched = [...data.matchAll(mermaidMatch)];

    return Promise.all(
      matched.map((match) => {
        const matchIndex = match.index;
        const mermaidText = match[1];

        return mermaid.parse(mermaidText).catch((error) => {
          const lineNumber = data.slice(0, matchIndex).split('\n').length;

          console.log(`${path}:${lineNumber}: Mermaid syntax error\nError: ${error}\n`);
          errors += 1;
        });
      }),
    );
  }),
);

if (errors > 0) {
  console.log(`Total errors: ${errors}`);
  // eslint-disable-next-line no-restricted-syntax
  console.log(`To fix these errors, see https://docs.gitlab.com/ee/development/documentation/testing.html#mermaid.`);
  process.exit(1);
}

@@ -46,6 +46,7 @@ class StaticAnalysis
    Task.new(%w[bin/rake config_lint], 10),
    Task.new(%w[bin/rake gitlab:sidekiq:all_queues_yml:check], 15),
    (Gitlab.ee? ? Task.new(%w[bin/rake gitlab:sidekiq:sidekiq_queues_yml:check], 11) : nil),
    Task.new(%w[scripts/lint/check_mermaid.mjs], 10),
    Task.new(%w[yarn run internal:stylelint], 8),
    Task.new(%w[scripts/lint-conflicts.sh], 1),
    Task.new(%w[yarn run block-dependencies], 1),

@@ -110,4 +110,20 @@ RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state, fe
      expect(page).to have_content('You need to sign in or sign up before continuing.')
    end
  end

  it 'load_raw_session does load known attributes only' do
    new_session = ActiveSession.send(:load_raw_session,
      'v2:{"ip_address": "127.0.0.1", "browser": "Firefox", "os": "Debian",' \
      '"device_type": "desktop", "session_id": "8f62cc7383c",' \
      '"new_attribute": "unknown attribute"}'
    )

    expect(new_session).to have_attributes(
      ip_address: "127.0.0.1",
      browser: "Firefox",
      os: "Debian",
      device_type: "desktop",
      session_id: "8f62cc7383c"
    )
  end
end

@@ -140,7 +140,7 @@ describe('WorkItemActions component', () => {
      },
      provide: {
        isGroup: false,
        glFeatures: { workItemsMvc: true, workItemsMvc2: true },
        glFeatures: { workItemsBeta: true, workItemsMvc2: true },
      },
      mocks: {
        $toast,

@@ -16,9 +16,10 @@ import WorkItemAttributesWrapper from '~/work_items/components/work_item_attribu
import {
  workItemResponseFactory,
  taskType,
  issueType,
  objectiveType,
  keyResultType,
  issueType,
  epicType,
} from '../mock_data';

describe('WorkItemAttributesWrapper component', () => {

@@ -54,7 +55,7 @@ describe('WorkItemAttributesWrapper component', () => {
        hasIssuableHealthStatusFeature: true,
        projectNamespace: 'namespace',
        glFeatures: {
          workItemsMvc: true,
          workItemsBeta: true,
          workItemsMvc2,
        },
      },

@@ -185,11 +186,12 @@ describe('WorkItemAttributesWrapper component', () => {

  describe('parent widget', () => {
    describe.each`
      description | workItemType | exists
      ${'when work item type is task'} | ${taskType} | ${true}
      ${'when work item type is objective'} | ${objectiveType} | ${true}
      ${'when work item type is keyresult'} | ${keyResultType} | ${true}
      ${'when work item type is issue'} | ${issueType} | ${false}
      description | workItemType | exists
      ${'when work item type is task'} | ${taskType} | ${true}
      ${'when work item type is objective'} | ${objectiveType} | ${true}
      ${'when work item type is key result'} | ${keyResultType} | ${true}
      ${'when work item type is issue'} | ${issueType} | ${true}
      ${'when work item type is epic'} | ${epicType} | ${true}
    `('$description', ({ workItemType, exists }) => {
      it(`${exists ? 'renders' : 'does not render'} parent component`, async () => {
        const response = workItemResponseFactory({ workItemType });

@@ -7,6 +7,55 @@ RSpec.describe Gitlab::Auth::CurrentUserMode, :request_store, feature_category:

  subject { described_class.new(user) }

  describe '#initialize' do
    context 'with user' do
      around do |example|
        Gitlab::Session.with_session(nil) do
          example.run
        end
      end

      it 'has no session' do
        subject
        expect(Gitlab::Session.current).to be_nil
      end
    end

    context 'with user and session' do
      include_context 'custom session'
      let(:session) { { 'key' => "value" } }

      it 'has a session' do
        described_class.new(user, session)
        expect(Gitlab::Session.current).to eq(session)
      end
    end
  end

  describe '#current_session_data' do
    include_context 'custom session'
    let(:session) { { 'key' => "value" } }

    it 'without session' do
      expect(Gitlab::Session.current).to eq(session)

      expect(Gitlab::NamespacedSessionStore).to receive(:new).with(described_class::SESSION_STORE_KEY, session)

      subject.current_session_data
      expect(Gitlab::Session.current).to eq(session)
    end

    it 'with session' do
      expect(Gitlab::Session.current).to eq(session)
      subject = described_class.new(user, session)

      expect(Gitlab::NamespacedSessionStore).to receive(:new).with(described_class::SESSION_STORE_KEY, session)

      subject.current_session_data
      expect(Gitlab::Session.current).to eq(session)
    end
  end

  context 'when session is available' do
    include_context 'custom session'

@@ -20,16 +20,10 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi

    context 'with valid configuration' do
      where(:number_of_days, :operator, :datasource, :attribute, :expected_method) do
        28 | 'AND' | 'redis_hll' | 'user.id' | :calculate_metrics_intersections
        7 | 'AND' | 'redis_hll' | 'user.id' | :calculate_metrics_intersections
        28 | 'AND' | 'database' | 'project.id' | :calculate_metrics_intersections
        7 | 'AND' | 'database' | 'user.id' | :calculate_metrics_intersections
        28 | 'OR' | 'redis_hll' | 'user.id' | :calculate_metrics_union
        7 | 'OR' | 'redis_hll' | 'project.id' | :calculate_metrics_union
        28 | 'OR' | 'database' | 'user.id' | :calculate_metrics_union
        7 | 'OR' | 'database' | 'user.id' | :calculate_metrics_union
        28 | 'AND' | 'internal_events' | 'user.id' | :calculate_metrics_intersections
        7 | 'AND' | 'internal_events' | 'project.id' | :calculate_metrics_intersections
        28 | 'OR' | 'internal_events' | 'user.id' | :calculate_metrics_union
        7 | 'OR' | 'internal_events' | 'user.id' | :calculate_metrics_union
      end

@@ -73,8 +67,6 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
    end

    where(:operator, :datasource, :expected_method, :expected_events) do
      'AND' | 'redis_hll' | :calculate_metrics_intersections | %w[event1 event2]
      'AND' | 'database' | :calculate_metrics_intersections | %w[event1 event2 event3]
      'OR' | 'redis_hll' | :calculate_metrics_union | %w[event1 event2]
      'OR' | 'database' | :calculate_metrics_union | %w[event1 event2 event3]
    end

@@ -110,8 +102,8 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
    context 'with invalid configuration' do
      where(:time_frame, :operator, :datasource, :expected_error) do
        '28d' | 'SUM' | 'redis_hll' | namespace::UnknownAggregationOperator
        '7d' | 'AND' | 'mongodb' | namespace::UnknownAggregationSource
        'all' | 'AND' | 'redis_hll' | namespace::DisallowedAggregationTimeFrame
        '7d' | 'OR' | 'mongodb' | namespace::UnknownAggregationSource
        'all' | 'OR' | 'redis_hll' | namespace::DisallowedAggregationTimeFrame
      end

      with_them do

@@ -1,90 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::Calculations::Intersection do
  let_it_be(:recorded_at) { Time.current.to_i }
  let_it_be(:start_date) { 4.weeks.ago.to_date }
  let_it_be(:end_date) { Date.current }
  let_it_be(:property_name) { 'property1' }

  shared_examples 'aggregated_metrics_data with source' do
    context 'with AND operator' do
      let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at, property_name: property_name } }

      context 'with even number of metrics' do
        it 'calculates intersection correctly', :aggregate_failures do
          # gmau_1 data is as follow
          # |A| => 4
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(4)
          # |B| => 6
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event5')).and_return(6)
          # |A + B| => 8
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(8)
          # Exclusion inclusion principle formula to calculate intersection of 2 sets
          # |A & B| = (|A| + |B|) - |A + B| => (4 + 6) - 8 => 2
          expect(source.calculate_metrics_intersections(metric_names: %w[event3 event5], start_date: start_date, end_date: end_date, recorded_at: recorded_at, property_name: property_name)).to eq(2)
        end
      end

      context 'with odd number of metrics' do
        it 'calculates intersection correctly', :aggregate_failures do
          # gmau_2 data is as follow:
          # |A| => 2
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event1')).and_return(2)
          # |B| => 3
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event2')).and_return(3)
          # |C| => 5
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(5)

          # |A + B| => 4 therefore |A & B| = (|A| + |B|) - |A + B| => 2 + 3 - 4 => 1
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2])).and_return(4)
          # |A + C| => 6 therefore |A & C| = (|A| + |C|) - |A + C| => 2 + 5 - 6 => 1
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event3])).and_return(6)
          # |B + C| => 7 therefore |B & C| = (|B| + |C|) - |B + C| => 3 + 5 - 7 => 1
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event2 event3])).and_return(7)
          # |A + B + C| => 8
          expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(8)
          # Exclusion inclusion principle formula to calculate intersection of 3 sets
          # |A & B & C| = (|A & B| + |A & C| + |B & C|) - (|A| + |B| + |C|) + |A + B + C|
          # (1 + 1 + 1) - (2 + 3 + 5) + 8 => 1
          expect(source.calculate_metrics_intersections(metric_names: %w[event1 event2 event3], start_date: start_date, end_date: end_date, recorded_at: recorded_at, property_name: property_name)).to eq(1)
        end
      end
    end
  end

  describe '.aggregated_metrics_data' do
    let(:source) do
      Class.new do
        extend Gitlab::Usage::Metrics::Aggregates::Sources::Calculations::Intersection
      end
    end

    it 'caches intermediate operations', :aggregate_failures do
      events = %w[event1 event2 event3 event5]

      params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at, property_name: property_name }

      events.each do |event|
        expect(source).to receive(:calculate_metrics_union)
          .with(params.merge(metric_names: event))
          .once
          .and_return(0)
      end

      2.upto(4) do |subset_size|
        events.combination(subset_size).each do |events|
          expect(source).to receive(:calculate_metrics_union)
            .with(params.merge(metric_names: events))
            .once
            .and_return(0)
        end
      end

      expect(source.calculate_metrics_intersections(metric_names: events, start_date: start_date, end_date: end_date, recorded_at: recorded_at, property_name: property_name)).to eq(0)
    end

    it_behaves_like 'aggregated_metrics_data with source'
  end
end

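A quick sanity check of the two-set identity the removed spec walks through (|A & B| = (|A| + |B|) - |A + B|), using plain Ruby sets with the same cardinalities (4, 6, 8):

```ruby
require 'set'

a = Set[1, 2, 3, 4]          # |A| = 4
b = Set[3, 4, 5, 6, 7, 8]    # |B| = 6
union = a | b                # |A + B| = 8

(a & b).size == a.size + b.size - union.size # => true (both sides are 2)
```
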
@@ -3634,10 +3634,10 @@ RSpec.describe Group, feature_category: :groups_and_projects do
    end
  end

  describe '#work_items_mvc_feature_flag_enabled?' do
  describe '#work_items_beta_feature_flag_enabled?' do
    it_behaves_like 'checks self and root ancestor feature flag' do
      let(:feature_flag) { :work_items_mvc }
      let(:feature_flag_method) { :work_items_mvc_feature_flag_enabled? }
      let(:feature_flag) { :work_items_beta }
      let(:feature_flag_method) { :work_items_beta_feature_flag_enabled? }
    end
  end

@@ -8608,12 +8608,12 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
    end
  end

  describe '#work_items_mvc_feature_flag_enabled?' do
  describe '#work_items_beta_feature_flag_enabled?' do
    let_it_be(:group_project) { create(:project, :in_subgroup) }

    it_behaves_like 'checks parent group feature flag' do
      let(:feature_flag_method) { :work_items_mvc_feature_flag_enabled? }
      let(:feature_flag) { :work_items_mvc }
      let(:feature_flag_method) { :work_items_beta_feature_flag_enabled? }
      let(:feature_flag) { :work_items_beta }
      let(:subject_project) { group_project }
    end
  end

@@ -10,19 +10,30 @@ RSpec.describe ClickHouse::RebuildMaterializedViewService, :click_house, feature
  let_it_be(:event3) { create(:closed_issue_event) }

  let(:connection) { ClickHouse::Connection.new(:main) }
  let(:runtime_limiter) { Gitlab::Metrics::RuntimeLimiter.new }

  before do
    insert_events_into_click_house
  end

  def invoke_service
    described_class.new(connection: connection, state: {
  let(:state) do
    {
      view_name: 'contributions_mv',
      view_table_name: 'contributions',
      tmp_view_name: 'tmp_contributions_mv',
      tmp_view_table_name: 'tmp_contributions',
      source_table_name: 'events'
    }).execute
    }
  end

  subject(:service_response) { run_service }

  def run_service(new_state = state)
    described_class.new(
      connection: connection,
      runtime_limiter: runtime_limiter,
      state: new_state
    ).execute
  end

  before do
    insert_events_into_click_house
  end

  it 're-creates the materialized view with correct data from the source table' do

@@ -35,14 +46,16 @@ RSpec.describe ClickHouse::RebuildMaterializedViewService, :click_house, feature
    expect(ids).to eq([event1.id])

    # Rebuild the MV so we get the inconsistency corrected
    invoke_service
    expect(service_response).to be_success
    payload = service_response.payload
    expect(payload[:status]).to eq(:finished)

    ids = connection.select('SELECT id FROM contributions FINAL').pluck('id')
    expect(ids).to match_array([event1.id, event2.id, event3.id])
  end

  it 'does not leave temporary tables around' do
    invoke_service
    expect(service_response).to be_success

    view_query = <<~SQL
      SELECT view_definition FROM information_schema.views

@@ -59,4 +72,59 @@ RSpec.describe ClickHouse::RebuildMaterializedViewService, :click_house, feature
    expect(connection.select(view_query)).to be_empty
    expect(connection.select(table_query)).to be_empty
  end

  context 'when the rebuild_mv_drop_old_tables FF is off' do
    it 'preserves the old tables' do
      stub_feature_flags(rebuild_mv_drop_old_tables: false)
      expect(service_response).to be_success

      view_query = <<~SQL
        SELECT view_definition FROM information_schema.views
        WHERE table_name = 'tmp_contributions_mv' AND
        table_schema = '#{connection.database_name}'
      SQL

      table_query = <<~SQL
        SELECT table_name FROM information_schema.tables
        WHERE table_name = 'tmp_contributions' AND
        table_schema = '#{connection.database_name}'
      SQL

      expect(connection.select(view_query)).not_to be_empty
      expect(connection.select(table_query)).not_to be_empty
    end
  end

  context 'when the processing is stopped due to over time' do
    before do
      stub_const("#{described_class}::INSERT_BATCH_SIZE", 1)
    end

    it 'returns time_limit status and the cursor' do
      allow(runtime_limiter).to receive(:over_time?).and_return(true)
      expect(service_response).to be_success

      payload = service_response.payload
      expect(payload[:status]).to eq(:over_time)
      expect(payload[:next_value]).to eq(event1.id + 1)
    end

    context 'when the service is invoked three times' do
      it 'finishes the processing' do
        allow(runtime_limiter).to receive(:over_time?).and_return(true)

        service_response = run_service
        expect(service_response.payload[:status]).to eq(:over_time)

        service_response = run_service(state.merge(next_value: service_response.payload[:next_value]))
        expect(service_response.payload[:status]).to eq(:over_time)

        service_response = run_service(state.merge(next_value: service_response.payload[:next_value]))
        expect(service_response.payload[:status]).to eq(:over_time)

        service_response = run_service(state.merge(next_value: service_response.payload[:next_value]))
        expect(service_response.payload[:status]).to eq(:finished)
      end
    end
  end
end

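The three hunks above change the service to take a runtime limiter and a resumable cursor. A hedged sketch of the resume loop that the "invoked three times" example exercises (the state keys come from the spec; everything else is assumed, not a verbatim GitLab API):

```ruby
connection = ClickHouse::Connection.new(:main)
runtime_limiter = Gitlab::Metrics::RuntimeLimiter.new

state = {
  view_name: 'contributions_mv',
  view_table_name: 'contributions',
  tmp_view_name: 'tmp_contributions_mv',
  tmp_view_table_name: 'tmp_contributions',
  source_table_name: 'events'
}

loop do
  response = ClickHouse::RebuildMaterializedViewService.new(
    connection: connection,
    runtime_limiter: runtime_limiter,
    state: state
  ).execute

  break if response.payload[:status] == :finished

  # :over_time - continue the rebuild from where the interrupted run stopped
  state = state.merge(next_value: response.payload[:next_value])
end
```
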
@@ -6735,7 +6735,6 @@
- './spec/lib/gitlab/usage_data/topology_spec.rb'
- './spec/lib/gitlab/usage/metric_definition_spec.rb'
- './spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb'
- './spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb'
- './spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb'
- './spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb'
- './spec/lib/gitlab/usage/metrics/instrumentations/active_user_count_metric_spec.rb'

@@ -2,12 +2,81 @@

require 'spec_helper'

RSpec.describe ClickHouse::RebuildMaterializedViewCronWorker, feature_category: :database do
  it 'invokes the RebuildMaterializedViewService' do
    allow_next_instance_of(ClickHouse::RebuildMaterializedViewService) do |instance|
      allow(instance).to receive(:execute)
    end

RSpec.describe ClickHouse::RebuildMaterializedViewCronWorker, :clean_gitlab_redis_shared_state, :freeze_time, feature_category: :database do
  def run_job
    described_class.new.perform
  end

  context 'when the previous run was just recently' do
    before do
      Gitlab::Redis::SharedState.with do |redis|
        state = { finished_at: 1.day.ago.to_json }
        redis.set(described_class.redis_key, Gitlab::Json.dump(state))
      end
    end

    it 'does not invoke the service' do
      expect(ClickHouse::RebuildMaterializedViewService).not_to receive(:new)

      run_job
    end
  end

  context 'when the rebuild_contributions_mv feature flag is disabled' do
    it 'does not invoke the service' do
      stub_feature_flags(rebuild_contributions_mv: false)

      expect(ClickHouse::RebuildMaterializedViewService).not_to receive(:new)

      run_job
    end
  end

  context 'when the service is finished', :click_house do
    it 'persists the finished_at timestamp' do
      run_job

      Gitlab::Redis::SharedState.with do |redis|
        data = Gitlab::Json.parse(redis.get(described_class.redis_key))
        expect(DateTime.parse(data['finished_at'])).to eq(Time.current)
      end
    end
  end

  context 'when the service is interrupted' do
    it 'persists the next value to continue the processing from' do
      allow_next_instance_of(ClickHouse::RebuildMaterializedViewService) do |instance|
        allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: { status: :over_time,
          next_value: 100 }))
      end

      run_job

      Gitlab::Redis::SharedState.with do |redis|
        data = Gitlab::Json.parse(redis.get(described_class.redis_key))
        expect(data['finished_at']).to eq(nil)
        expect(data['next_value']).to eq(100)
      end
    end
  end

  context 'when the previous run was interrupted' do
    before do
      Gitlab::Redis::SharedState.with do |redis|
        state = { started_at: 1.day.ago.to_json, next_value: 200 }
        redis.set(described_class.redis_key, Gitlab::Json.dump(state))
      end
    end

    it 'continues from the the previously persisted next_value' do
      service = instance_double('ClickHouse::RebuildMaterializedViewService',
        execute: ServiceResponse.success(payload: { status: :finished }))

      expect(ClickHouse::RebuildMaterializedViewService).to receive(:new) do |args|
        expect(args[:state][:next_value]).to eq(200)
      end.and_return(service)

      run_job
    end
  end
end

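A hedged sketch of the Redis bookkeeping these worker examples assert on (the key comes from `described_class.redis_key`; the exact payload shape is inferred from the tests above, not from the worker's source):

```ruby
Gitlab::Redis::SharedState.with do |redis|
  raw = redis.get(ClickHouse::RebuildMaterializedViewCronWorker.redis_key)
  data = raw ? Gitlab::Json.parse(raw) : {}

  if data['finished_at']
    # A recent successful rebuild: the worker skips this run.
  elsif data['next_value']
    # An interrupted rebuild: the worker resumes from this cursor.
  end
end
```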