Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-06-13 15:17:56 +00:00
parent d939f38b0f
commit 4292fa7177
99 changed files with 1788 additions and 483 deletions

View File

@ -132,7 +132,7 @@ registry-with-cdn:
object-storage:
extends:
- .qa
- ee
- .ee
variables:
QA_SCENARIO: Test::Instance::Image
QA_RSPEC_TAGS: --tag object_storage

View File

@ -62,6 +62,15 @@ stages:
- echo "Running - '$QA_COMMAND'"
- eval "$QA_COMMAND"
.docker-in-docker:
services:
- docker:${DOCKER_VERSION}-dind
variables:
DOCKER_HOST: tcp://docker:2376
DOCKER_TLS_CERTDIR: "/certs"
DOCKER_TLS_VERIFY: 1
DOCKER_CERT_PATH: "$DOCKER_TLS_CERTDIR/client"
.qa:
extends:
- .bundler-variables

View File

@ -15,13 +15,11 @@ workflow:
stage: test
extends:
- .qa-cache
- .docker-in-docker
- .qa-run-e2e-with-bundler
needs: [build-cng]
tags: [e2e]
services:
- docker:${DOCKER_VERSION}-dind
variables:
DOCKER_HOST: tcp://docker:2375
FF_NETWORK_PER_BUILD: "true"
NAMESPACE: gitlab
KUBECONFIG: /root/.kube/config
@ -61,7 +59,7 @@ workflow:
fi
- cd qa
- bundle exec cng log events --save
- bundle exec cng log pods --save --containers all
- bundle exec cng log pods --save --containers all --no-fail-on-missing-pods
# This command prints all the necessary arguments to be able to recreate the same deployment as on CI
- |
bundle exec cng create deployment "${DEPLOYMENT_TYPE}" \

View File

@ -46,7 +46,7 @@ include:
image: "${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/${BUILD_OS}-${OS_VERSION}-ruby-${RUBY_VERSION}:git-2.36-lfs-2.9-chrome-${CHROME_VERSION}-docker-${DOCKER_VERSION}-gcloud-383-kubectl-1.23"
extends:
- .qa-cache
- .default-retry
- .docker-in-docker
- .gitlab-qa-report
- .qa-run-e2e-with-bundler
stage: test
@ -60,7 +60,6 @@ include:
tags:
- e2e
variables:
DOCKER_HOST: tcp://docker:2375
QA_GENERATE_ALLURE_REPORT: "true"
QA_CAN_TEST_PRAEFECT: "false"
QA_INTERCEPT_REQUESTS: "false"

View File

@ -0,0 +1,85 @@
<!--
Instructions:
1. Replace all occurrences of X.Y with the release that is targeted as an upgrade stop.
2. Give the issue a title: "X.Y upgrade stop planning"
3. Replace the <deadline date> with the ISO date 10 days before the [release date](https://about.gitlab.com/releases/) (this is always two Fridays before).
-->
This planning issue collects a list of changes that require an upgrade stop at X.Y.
## Notes for all engineering managers
To help determine whether %"X.Y" is a required stop,
add your issue to the list below before `<deadline date>` (your local time).
You can review the [Common scenarios that require stops](https://docs.gitlab.com/ee/development/database/required_stops.html) and
[avoiding required stops](https://docs.gitlab.com/ee/development/avoiding_required_stops.html) to
confirm. If you are still not sure after reviewing the documentation, leave a
comment with the link to your issue or epic in this issue for others to review.
If your change requires an upgrade stop and cannot wait until %"X.Y", please
reach out to `@dorrino` and `@plu8`.
### List of changes that require an upgrade stop for version X.Y
- <epic or issue link>
## Notes for the issue author only
### After the issue creation
Slack message template:
>>>
The Distribution::Deploy group created an issue (link to this issue) to
determine if X.Y needs to be [a required upgrade
stop](https://docs.gitlab.com/ee/development/avoiding_required_stops.html#causes-of-required-stops). Please review your
upcoming changes and share any that may require an upgrade stop on the issue (link to
this issue). Thank you.
>>>
- [ ] Update "Next Required Stop" bookmark in `#g_distribution` to this issue link.
- [ ] Update [EWIR](https://docs.google.com/document/d/1JBdCl3MAOSdlgq3kzzRmtzTsFWsTIQ9iQg0RHhMht6E/edit#heading=h.9qwiojcv4wzk).
- [ ] Use the previous Slack message template to post to `#engineering-fyi` and cross post to:
- [ ] `#eng-managers`
- [ ] `#cto`
### After the decision is made
#### If X.Y is an upgrade stop
Slack message template:
>>>
An update on the next upgrade stop (link to this issue): X.Y is a planned
upgrade stop. It is a great opportunity to plan tasks as mentioned on
[Adding required stops](https://docs.gitlab.com/ee/development/database/required_stops.html)
and [Avoiding required stops](https://docs.gitlab.com/ee/development/avoiding_required_stops.html).
>>>
- [ ] Comment on this issue.
- [ ] Update [EWIR](https://docs.google.com/document/d/1JBdCl3MAOSdlgq3kzzRmtzTsFWsTIQ9iQg0RHhMht6E/edit#heading=h.9qwiojcv4wzk).
- [ ] Use the previous Slack message template to post to `#engineering-fyi` and cross post to:
- [ ] `#eng-managers`
- [ ] `#cto`
- [ ] `#whats-happening-at-gitlab`
- [ ] `#support_self-managed`
#### If X.Y is not an upgrade stop
Slack message template:
>>>
An update on the next upgrade stop (link to this issue): X.Y is NOT a planned upgrade stop.
>>>
- [ ] Comment on this issue.
- [ ] Update [EWIR](https://docs.google.com/document/d/1JBdCl3MAOSdlgq3kzzRmtzTsFWsTIQ9iQg0RHhMht6E/edit#heading=h.9qwiojcv4wzk).
- [ ] Use the previous Slack message template to post to `#engineering-fyi` and cross post to:
- [ ] `#eng-managers`
- [ ] `#cto`
/cc @gitlab-org/development-leaders

Gemfile
View File

@ -52,7 +52,7 @@ gem 'sprockets', '~> 3.7.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'view_component', '~> 3.12.1' # rubocop:todo Gemfile/MissingFeatureCategory
# Supported DBs
gem 'pg', '~> 1.5.6' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'pg', '~> 1.5.6', feature_category: :database
gem 'neighbor', '~> 0.3.2', feature_category: :duo_chat
@ -252,9 +252,12 @@ gem 'state_machines-activerecord', '~> 0.8.0' # rubocop:todo Gemfile/MissingFeat
gem 'acts-as-taggable-on', '~> 10.0' # rubocop:todo Gemfile/MissingFeatureCategory
# Background jobs
gem 'sidekiq', path: 'vendor/gems/sidekiq-7.1.6', require: 'sidekiq' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'sidekiq-cron', '~> 1.12.0', feature_category: :shared
gem 'gitlab-sidekiq-fetcher', path: 'vendor/gems/sidekiq-reliable-fetch', require: 'sidekiq-reliable-fetch' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'sidekiq', path: 'vendor/gems/sidekiq-7.1.6', require: 'sidekiq', feature_category: :scalability
gem 'sidekiq-cron', '~> 1.12.0', feature_category: :scalability
gem 'gitlab-sidekiq-fetcher',
path: 'vendor/gems/sidekiq-reliable-fetch',
require: 'sidekiq-reliable-fetch',
feature_category: :scalability
# Cron Parser
gem 'fugit', '~> 1.8.1' # rubocop:todo Gemfile/MissingFeatureCategory
@ -290,7 +293,7 @@ gem 'redis-clustering', '~> 5.2.0', feature_category: :redis
gem 'connection_pool', '~> 2.4' # rubocop:todo Gemfile/MissingFeatureCategory
# Redis session store
gem 'redis-actionpack', '~> 5.4.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'redis-actionpack', '~> 5.4.0', feature_category: :redis
# Discord integration
gem 'discordrb-webhooks', '~> 3.5', require: false, feature_category: :integrations

View File

@ -1,5 +1,5 @@
<script>
import { GlButton, GlSkeletonLoader } from '@gitlab/ui';
import { GlButton, GlSkeletonLoader, GlSearchBoxByType } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import { sidebarEntriesToTree } from '../utils';
import WikiSidebarEntry from './wiki_sidebar_entry.vue';
@ -11,21 +11,34 @@ export default {
WikiSidebarEntry,
GlButton,
GlSkeletonLoader,
GlSearchBoxByType,
},
inject: ['sidebarPagesApi', 'hasCustomSidebar', 'viewAllPagesPath'],
props: {},
data() {
return {
allEntries: [],
entries: [],
totalCount: 0,
isLoadingContent: false,
searchTerm: '',
};
},
computed: {
countExceedsSidebarLimit() {
return this.totalCount > this.$options.SIDEBAR_LIMIT;
return this.totalCount > this.$options.SIDEBAR_LIMIT && !this.searchTerm;
},
},
watch: {
async searchTerm() {
this.entries = sidebarEntriesToTree(
this.allEntries
.filter((entry) => entry.title.toLowerCase().includes(this.searchTerm.toLowerCase()))
.slice(0, SIDEBAR_LIMIT),
);
},
},
@ -38,6 +51,7 @@ export default {
this.entries = sidebarEntriesToTree(entries.slice(0, SIDEBAR_LIMIT));
this.totalCount = entries.length;
this.allEntries = entries;
},
SIDEBAR_LIMIT,
};
@ -47,11 +61,19 @@ export default {
<gl-skeleton-loader />
</div>
<ul v-else class="wiki-pages" :class="{ 'gl-border-b !gl-pb-3': hasCustomSidebar }">
<wiki-sidebar-entry v-for="entry in entries" :key="entry.slug" :page="entry" />
<div
v-if="totalCount > $options.SIDEBAR_LIMIT"
class="gl-text-secondary gl-mt-3 gl-ml-3 gl-inline-block"
>
<gl-search-box-by-type
v-model.trim="searchTerm"
:placeholder="s__('Wiki|Search pages')"
class="gl-m-2"
@keyup.prevent.stop
/>
<wiki-sidebar-entry
v-for="entry in entries"
:key="entry.slug"
:page="entry"
:search-term="searchTerm"
/>
<div v-if="countExceedsSidebarLimit" class="gl-text-secondary gl-mt-3 gl-ml-3 gl-inline-block">
{{ sprintf(s__('Wiki|+ %{count} more'), { count: totalCount - $options.SIDEBAR_LIMIT }) }}
<span class="gl-px-2">&middot;</span>
</div>

View File

@ -1,7 +1,9 @@
<script>
import { GlIcon, GlButton, GlLink } from '@gitlab/ui';
import { escape } from 'lodash';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import { s__, sprintf } from '~/locale';
import SafeHtml from '~/vue_shared/directives/safe_html';
export default {
name: 'WikiSidebarEntry',
@ -11,12 +13,20 @@ export default {
GlButton,
LocalStorageSync,
},
directives: {
SafeHtml,
},
inject: ['canCreate'],
props: {
page: {
type: Object,
required: true,
},
searchTerm: {
type: String,
default: '',
required: false,
},
},
data() {
return {
@ -41,7 +51,16 @@ export default {
toggleCollapsed() {
this.isCollapsed = !this.isCollapsed;
},
highlight(text) {
return this.searchTerm
? String(escape(text)).replace(
new RegExp(this.searchTerm, 'i'),
(match) => `<strong>${match}</strong>`,
)
: escape(text);
},
},
safeHtmlConfig: { ALLOWED_TAGS: ['strong'] },
};
</script>
<template>
@ -55,14 +74,13 @@ export default {
@click="toggleCollapsed"
>
<gl-link
v-safe-html:[$options.safeHtmlConfig]="highlight(pageTitle)"
:href="page.path"
class="gl-str-truncated"
:data-qa-page-name="pageTitle"
:data-testid="page.children.length ? 'wiki-dir-page-link' : 'wiki-page-link'"
@click.stop
>
{{ pageTitle }}
</gl-link>
/>
<gl-button
v-if="canCreate"
icon="plus"
@ -82,7 +100,12 @@ export default {
/>
</span>
<ul v-if="page.children.length && !isCollapsed" dir="auto" class="!gl-pl-5">
<wiki-sidebar-entry v-for="child in page.children" :key="child.slug" :page="child" />
<wiki-sidebar-entry
v-for="child in page.children"
:key="child.slug"
:page="child"
:search-term="searchTerm"
/>
</ul>
</li>
</template>

View File

@ -6,6 +6,7 @@ import * as Sentry from '~/sentry/sentry_browser_wrapper';
import axios from '~/lib/utils/axios_utils';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import Tracking from '~/tracking';
import { logError } from '~/lib/logger';
import { getFormattedItem } from '../utils';
import {
@ -16,6 +17,7 @@ import {
ISSUE_HANDLE,
PATH_HANDLE,
PAGES_GROUP_TITLE,
SETTINGS_GROUP_TITLE,
PATH_GROUP_TITLE,
GROUP_TITLES,
MAX_ROWS,
@ -37,6 +39,7 @@ export default {
'commandPaletteCommands',
'commandPaletteLinks',
'autocompletePath',
'settingsPath',
'searchContext',
'projectFilesPath',
'projectBlobPath',
@ -82,6 +85,7 @@ export default {
break;
}
}, DEFAULT_DEBOUNCE_AND_THROTTLE_MS),
settings: [],
}),
computed: {
isCommandMode() {
@ -145,6 +149,9 @@ export default {
// Track immediately on component creation
const label = TRACKING_HANDLE_LABEL_MAP[value] ?? 'unknown';
this.track(TRACKING_ACTIVATE_COMMAND_PALETTE, { label });
// Fetch settings results only for ">"
if (value === COMMAND_HANDLE) this.fetchSettings();
},
immediate: true,
},
@ -153,6 +160,20 @@ export default {
this.$emit('updated');
},
methods: {
async fetchSettings() {
const projectId = this.searchContext.project.id;
if (projectId) {
await axios
.get(`${this.settingsPath}?project_id=${projectId}`)
.then((response) => {
this.settings = response.data;
})
.catch((e) => {
logError(e);
this.settings = [];
});
}
},
filterBySearchQuery(items, key = 'keywords') {
return fuzzaldrinPlus.filter(items, this.searchQuery, { key });
},
@ -193,6 +214,15 @@ export default {
items: matchedLinks,
});
}
const matchedSettings = this.filterBySearchQuery(this.settings, 'text');
if (matchedSettings.length) {
this.groups.push({
name: SETTINGS_GROUP_TITLE,
items: matchedSettings,
});
}
},
async getScopedItems() {
if (this.searchQuery?.length < 3) return;

View File

@ -52,6 +52,7 @@ export const PROJECTS_GROUP_TITLE = s__('GlobalSearch|Projects');
export const GROUPS_GROUP_TITLE = s__('GlobalSearch|Groups');
export const ISSUES_GROUP_TITLE = s__('GlobalSearch|Issues');
export const PATH_GROUP_TITLE = s__('CommandPalette|Project files');
export const SETTINGS_GROUP_TITLE = s__('CommandPalette|Settings');
export const MODAL_CLOSE_ESC = 'esc';
export const MODAL_CLOSE_BACKGROUND = 'backdrop';

View File

@ -27,7 +27,7 @@ export default {
:project-id="item.id"
:project-name="item.title"
:project-avatar-url="item.avatar"
:size="24"
:size="32"
aria-hidden="true"
/>

View File

@ -107,7 +107,14 @@ export const initSuperSidebar = () => {
const commandPaletteLinks = convertObjectPropsToCamelCase(sidebarData.current_menu_items || []);
const contextSwitcherLinks = sidebarData.context_switcher_links;
const { searchPath, issuesPath, mrPath, autocompletePath, searchContext } = searchData;
const {
searchPath,
issuesPath,
mrPath,
autocompletePath,
settingsPath,
searchContext,
} = searchData;
const isImpersonating = parseBoolean(sidebarData.is_impersonating);
const isGroup = Boolean(sidebarData.current_context?.namespace === CONTEXT_NAMESPACE_GROUPS);
@ -124,6 +131,7 @@ export const initSuperSidebar = () => {
commandPaletteLinks,
contextSwitcherLinks,
autocompletePath,
settingsPath,
searchContext,
projectFilesPath,
projectBlobPath,

View File

@ -120,6 +120,19 @@ class SearchController < ApplicationController
end
end
def settings
return render(json: []) unless current_user
project_id = params.require(:project_id)
project = Project.find_by(id: project_id) # rubocop: disable CodeReuse/ActiveRecord -- Using `find_by` as `find` would raise 404s
if project && current_user.can?(:admin_project, project)
render json: Search::Settings.new.for_project(project)
else
render json: []
end
end
def autocomplete
term = params.require(:term)

View File

@ -80,8 +80,6 @@ module Namespaces
def sort(items)
return items.projects_order_id_desc unless params[:sort]
return items.order_by_storage_size(:asc) if params[:sort] == :storage_size_asc
return items.order_by_storage_size(:desc) if params[:sort] == :storage_size_desc
if params[:sort] == :similarity && params[:search].present?
return items.sorted_by_similarity_desc(params[:search])

View File

@ -16,6 +16,8 @@ module Types
value 'PATH_DESC', 'Path by descending order.', value: :path_desc
value 'STARS_ASC', 'Stars by ascending order.', value: :stars_asc
value 'STARS_DESC', 'Stars by descending order.', value: :stars_desc
value 'STORAGE_SIZE_ASC', 'Storage size by ascending order.', value: :storage_size_asc
value 'STORAGE_SIZE_DESC', 'Storage size by descending order.', value: :storage_size_desc
end
end
end

View File

@ -190,6 +190,7 @@ module SidebarsHelper
issues_path: issues_dashboard_path,
mr_path: merge_requests_dashboard_path,
autocomplete_path: search_autocomplete_path,
settings_path: search_settings_path,
search_context: header_search_context
}
end

View File

@ -49,8 +49,7 @@ module Ci
)
end
# TODO: The usage counts will be populated by a worker that aggregates the data daily.
# See https://gitlab.com/gitlab-org/gitlab/-/issues/452545.
# The usage counts are updated daily by Ci::Catalog::Resources::AggregateLast30DayUsageWorker
scope :order_by_last_30_day_usage_count_desc, -> { reorder(last_30_day_usage_count: :desc) }
scope :order_by_last_30_day_usage_count_asc, -> { reorder(last_30_day_usage_count: :asc) }

View File

@ -10,6 +10,7 @@ module Ci
STARTED_STATUSES = %w[running success failed].freeze
ACTIVE_STATUSES = %w[waiting_for_resource preparing waiting_for_callback pending running].freeze
COMPLETED_STATUSES = %w[success failed canceled skipped].freeze
COMPLETED_WITH_MANUAL_STATUSES = COMPLETED_STATUSES + %w[manual]
STOPPED_STATUSES = COMPLETED_STATUSES + BLOCKED_STATUS
ORDERED_STATUSES = %w[failed preparing pending running waiting_for_callback waiting_for_resource manual scheduled canceling canceled success skipped created].freeze
PASSED_WITH_WARNINGS_STATUSES = %w[failed canceled].to_set.freeze
@ -48,7 +49,7 @@ module Ci
end
def completed_with_manual_statuses
completed_statuses + [:manual]
COMPLETED_WITH_MANUAL_STATUSES.map(&:to_sym)
end
def stopped_statuses
@ -121,7 +122,7 @@ module Ci
end
def complete_or_manual?
self.class.completed_with_manual_statuses.map(&:to_s).include?(status)
COMPLETED_WITH_MANUAL_STATUSES.include?(status)
end
def incomplete?

View File

@ -19,7 +19,8 @@ module Enums
rpm: 10,
deb: 11,
'cbl-mariner': 12,
wolfi: 13
wolfi: 13,
cargo: 14
}.with_indifferent_access.freeze
DEPENDENCY_SCANNING_PURL_TYPES = %w[
@ -31,6 +32,7 @@ module Enums
npm
nuget
pypi
cargo
].freeze
CONTAINER_SCANNING_PURL_TYPES = %w[

View File

@ -664,6 +664,8 @@ class Project < ApplicationRecord
# Sometimes queries (e.g. using CTEs) require explicit disambiguation with table name
scope :projects_order_id_asc, -> { reorder(self.arel_table['id'].asc) }
scope :projects_order_id_desc, -> { reorder(self.arel_table['id'].desc) }
scope :sorted_by_storage_size_asc, -> { order_by_storage_size(:asc) }
scope :sorted_by_storage_size_desc, -> { order_by_storage_size(:desc) }
scope :order_by_storage_size, ->(direction) do
build_keyset_order_on_joined_column(
scope: joins(:statistics),
@ -1002,10 +1004,10 @@ class Project < ApplicationRecord
def sort_by_attribute(method)
case method.to_s
when 'storage_size_asc'
sorted_by_storage_size_asc
when 'storage_size_desc'
# storage_size is a joined column so we need to
# pass a string to avoid AR adding the table name
reorder('project_statistics.storage_size DESC, projects.id DESC')
sorted_by_storage_size_desc
when 'latest_activity_desc'
sorted_by_updated_desc
when 'latest_activity_asc'

View File

@ -0,0 +1,75 @@
# frozen_string_literal: true
module Ci
module Catalog
module Resources
# This service aggregates CI component usage data and updates `last_30_day_usage_count` for
# each catalog resource daily. It utilizes Gitlab::Ci::Components::Usages::Aggregator which
# implements a "continue later" mechanism to process the data in time-boxed jobs.
# rubocop: disable CodeReuse/ActiveRecord -- Custom queries required
class AggregateLast30DayUsageService
include Gitlab::Utils::StrongMemoize
TARGET_MODEL = Ci::Catalog::Resource
GROUP_BY_COLUMN = :catalog_resource_id
WINDOW_LENGTH = 30.days
def execute
return ServiceResponse.success(message: "Processing complete for #{today}") if done_processing?
aggregator = Gitlab::Ci::Components::Usages::Aggregator.new(
target_model: TARGET_MODEL,
group_by_column: GROUP_BY_COLUMN,
usage_start_date: today - WINDOW_LENGTH,
usage_end_date: today - 1.day,
lease_key: lease_key
)
result = aggregator.each_batch do |usage_counts|
save_usage_counts!(usage_counts)
end
if result
ServiceResponse.success(message: 'Targets processed', payload: result.to_h)
else
ServiceResponse.success(message: 'Lease taken', payload: { lease_key: lease_key })
end
end
private
def done_processing?
min_updated_at = TARGET_MODEL.minimum(:last_30_day_usage_count_updated_at)
return true unless min_updated_at
min_updated_at >= today.to_time
end
def save_usage_counts!(usage_counts)
mapping = usage_counts.transform_values { |v| { last_30_day_usage_count: v } }
catalog_resource_ids = usage_counts.keys.map(&:id)
TARGET_MODEL.transaction do
Gitlab::Database::BulkUpdate.execute(%i[last_30_day_usage_count], mapping)
# Gitlab::Database::BulkUpdate does not support column type
# `:timestamptz` so we must update the timestamps separately.
TARGET_MODEL
.where(id: catalog_resource_ids)
.update_all(last_30_day_usage_count_updated_at: Time.current)
end
end
def today
Date.today
end
strong_memoize_attr :today
def lease_key
self.class.name
end
end
# rubocop: enable CodeReuse/ActiveRecord
end
end
end

View File

@ -27,7 +27,7 @@
%template.js-project-permissions-form-data{ type: "application/json" }= project_permissions_panel_data(@project).to_json.html_safe
.js-project-permissions-form{ data: visibility_confirm_modal_data(@project, reduce_visibility_form_id) }
%section.settings.no-animate{ class: ('expanded' if expanded), data: { testid: 'badges-settings-content' } }
%section.settings.no-animate#js-badges-settings{ class: ('expanded' if expanded), data: { testid: 'badges-settings-content' } }
.settings-header
%h4.settings-title.js-settings-toggle.js-settings-toggle-trigger-only
= s_('ProjectSettings|Badges')

View File

@ -2,7 +2,7 @@
- setting = error_tracking_setting
%section.settings.no-animate.js-error-tracking-settings
%section.settings.no-animate#js-error-tracking-settings
.settings-header
%h4.settings-title.js-settings-toggle.js-settings-toggle-trigger-only
= _('Error tracking')

View File

@ -246,6 +246,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: cronjob:ci_catalog_resources_aggregate_last30_day_usage
:worker_name: Ci::Catalog::Resources::AggregateLast30DayUsageWorker
:feature_category: :pipeline_composition
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:ci_catalog_resources_process_sync_events
:worker_name: Ci::Catalog::Resources::ProcessSyncEventsWorker
:feature_category: :pipeline_composition

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
module Ci
module Catalog
module Resources
# This worker can be called multiple times simultaneously but only one can process data at a time.
# This is ensured by an exclusive lease guard in `Gitlab::Ci::Components::Usages::Aggregator`.
# The scheduling frequency should be == `Gitlab::Ci::Components::Usages::Aggregator::MAX_RUNTIME`
# so there is no time gap between job runs.
class AggregateLast30DayUsageWorker
include ApplicationWorker
include CronjobQueue # rubocop: disable Scalability/CronWorkerContext -- Periodic processing is required
feature_category :pipeline_composition
data_consistency :sticky
urgency :low
idempotent!
deduplicate :until_executed, if_deduplicated: :reschedule_once,
ttl: Gitlab::Ci::Components::Usages::Aggregator::WORKER_DEDUP_TTL
def perform
response = Ci::Catalog::Resources::AggregateLast30DayUsageService.new.execute
log_hash_metadata_on_done(
status: response.status,
message: response.message,
**response.payload
)
end
end
end
end
end

View File

@ -707,6 +707,9 @@ Settings.cron_jobs['namespaces_process_outdated_namespace_descendants_cron_worke
Settings.cron_jobs['performance_bar_stats'] ||= {}
Settings.cron_jobs['performance_bar_stats']['cron'] ||= '*/2 * * * *'
Settings.cron_jobs['performance_bar_stats']['job_class'] = 'GitlabPerformanceBarStatsWorker'
Settings.cron_jobs['ci_catalog_resources_aggregate_last30_day_usage_worker'] ||= {}
Settings.cron_jobs['ci_catalog_resources_aggregate_last30_day_usage_worker']['cron'] ||= '*/4 * * * *'
Settings.cron_jobs['ci_catalog_resources_aggregate_last30_day_usage_worker']['job_class'] = 'Ci::Catalog::Resources::AggregateLast30DayUsageWorker'
Gitlab.ee do
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= {}

View File

@ -63,6 +63,7 @@ InitializerConnections.raise_if_new_database_connection do
# Search
get 'search' => 'search#show', as: :search
get 'search/autocomplete' => 'search#autocomplete', as: :search_autocomplete
get 'search/settings' => 'search#settings'
get 'search/count' => 'search#count', as: :search_count
get 'search/opensearch' => 'search#opensearch', as: :search_opensearch

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillVulnerabilityIssueLinksProjectId
description: Backfills sharding key `vulnerability_issue_links.project_id` from `vulnerabilities`.
feature_category: vulnerability_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156249
milestone: '17.1'
queued_migration_version: 20240613064362
finalize_after: '2024-07-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -19,3 +19,4 @@ desired_sharding_key:
table: vulnerabilities
sharding_key: project_id
belongs_to: vulnerability
desired_sharding_key_migration_job_name: BackfillVulnerabilityIssueLinksProjectId

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToVulnerabilityIssueLinks < Gitlab::Database::Migration[2.2]
milestone '17.1'
def change
add_column :vulnerability_issue_links, :project_id, :bigint
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class IndexLast30DayUsageCountUpdatedAtOnCatalogResources < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
INDEX_NAME = 'index_catalog_resources_on_last_30_day_usage_count_updated_at'
def up
add_concurrent_index :catalog_resources, :last_30_day_usage_count_updated_at, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :catalog_resources, INDEX_NAME
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
class ChangeIndexPCatalogResourceComponentUsagesOnCatalogResourceId < Gitlab::Database::Migration[2.2]
include Gitlab::Database::PartitioningMigrationHelpers
milestone '17.1'
disable_ddl_transaction!
TABLE_NAME = :p_catalog_resource_component_usages
COLUMN_NAMES = [:catalog_resource_id, :used_by_project_id, :used_date]
INDEX_NAME = 'idx_component_usages_on_catalog_resource_used_by_proj_used_date'
OLD_COLUMN_NAMES = [:catalog_resource_id]
OLD_INDEX_NAME = 'idx_p_catalog_resource_component_usages_on_catalog_resource_id'
def up
add_concurrent_partitioned_index(TABLE_NAME, COLUMN_NAMES, name: INDEX_NAME)
remove_concurrent_partitioned_index_by_name(TABLE_NAME, OLD_INDEX_NAME)
end
def down
add_concurrent_partitioned_index(TABLE_NAME, OLD_COLUMN_NAMES, name: OLD_INDEX_NAME)
remove_concurrent_partitioned_index_by_name(TABLE_NAME, INDEX_NAME)
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexVulnerabilityIssueLinksOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
INDEX_NAME = 'index_vulnerability_issue_links_on_project_id'
def up
add_concurrent_index :vulnerability_issue_links, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :vulnerability_issue_links, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddVulnerabilityIssueLinksProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :vulnerability_issue_links, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :vulnerability_issue_links, column: :project_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddVulnerabilityIssueLinksProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.1'
def up
install_sharding_key_assignment_trigger(
table: :vulnerability_issue_links,
sharding_key: :project_id,
parent_table: :vulnerabilities,
parent_sharding_key: :project_id,
foreign_key: :vulnerability_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :vulnerability_issue_links,
sharding_key: :project_id,
parent_table: :vulnerabilities,
parent_sharding_key: :project_id,
foreign_key: :vulnerability_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillVulnerabilityIssueLinksProjectId < Gitlab::Database::Migration[2.2]
milestone '17.1'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillVulnerabilityIssueLinksProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:vulnerability_issue_links,
:id,
:project_id,
:vulnerabilities,
:project_id,
:vulnerability_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:vulnerability_issue_links,
:id,
[
:project_id,
:vulnerabilities,
:project_id,
:vulnerability_id
]
)
end
end

View File

@ -0,0 +1 @@
48e27a3376ea15329fed626a4839d5929affd797628e50b7c530741da92e8639

View File

@ -0,0 +1 @@
87fd0c7f40c011772c12e74f93649bb2fa6c130da0f7a877f423099f94cebb35

View File

@ -0,0 +1 @@
86d1e1173af7da1af4e9545d83d73165bed501041985c2b126a9153b39e7bee3

View File

@ -0,0 +1 @@
6853e049fa8dcd19033d0387949e6a79cdbeba75ce75d977b088c4e372cdc8b8

View File

@ -0,0 +1 @@
48390874b900b4dbf6a96335664c592bd636c707c72d64212cd1a78c6fa0a8a2

View File

@ -0,0 +1 @@
e02b646915066f70e3cc01c8416b1a1e235bd3f6243280ff7e162938cc44fee7

View File

@ -0,0 +1 @@
9534f4323f2d4fdbaa9a310b3c4c0af82debbe8809714ce48162ff798d7e9462

View File

@ -902,6 +902,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_25fe4f7da510() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "vulnerabilities"
WHERE "vulnerabilities"."id" = NEW."vulnerability_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_2ac3d66ed1d3() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -18399,6 +18415,28 @@ CREATE SEQUENCE vulnerabilities_id_seq
ALTER SEQUENCE vulnerabilities_id_seq OWNED BY vulnerabilities.id;
CREATE TABLE vulnerability_export_parts (
id bigint NOT NULL,
vulnerability_export_id bigint NOT NULL,
start_id bigint NOT NULL,
end_id bigint NOT NULL,
organization_id bigint DEFAULT 1 NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
file_store integer,
file text,
CONSTRAINT check_baded21d39 CHECK ((char_length(file) <= 255))
);
CREATE SEQUENCE vulnerability_export_parts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE vulnerability_export_parts_id_seq OWNED BY vulnerability_export_parts.id;
CREATE TABLE vulnerability_exports (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -18424,28 +18462,6 @@ CREATE SEQUENCE vulnerability_exports_id_seq
ALTER SEQUENCE vulnerability_exports_id_seq OWNED BY vulnerability_exports.id;
CREATE TABLE vulnerability_export_parts (
id bigint NOT NULL,
vulnerability_export_id bigint NOT NULL,
start_id bigint NOT NULL,
end_id bigint NOT NULL,
organization_id bigint DEFAULT 1 NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
file_store integer,
file text,
CONSTRAINT check_baded21d39 CHECK ((char_length(file) <= 255))
);
CREATE SEQUENCE vulnerability_export_parts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE vulnerability_export_parts_id_seq OWNED BY vulnerability_export_parts.id;
CREATE TABLE vulnerability_external_issue_links (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -18644,7 +18660,8 @@ CREATE TABLE vulnerability_issue_links (
issue_id bigint NOT NULL,
link_type smallint DEFAULT 1 NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
updated_at timestamp with time zone NOT NULL,
project_id bigint
);
CREATE SEQUENCE vulnerability_issue_links_id_seq
@ -25065,6 +25082,8 @@ CREATE INDEX idx_ci_running_builds_on_runner_type_and_owner_xid_and_id ON ci_run
CREATE INDEX idx_compliance_security_policies_on_policy_configuration_id ON compliance_framework_security_policies USING btree (policy_configuration_id);
CREATE INDEX idx_component_usages_on_catalog_resource_used_by_proj_used_date ON ONLY p_catalog_resource_component_usages USING btree (catalog_resource_id, used_by_project_id, used_date);
CREATE UNIQUE INDEX idx_component_usages_on_component_used_by_project_and_used_date ON ONLY p_catalog_resource_component_usages USING btree (component_id, used_by_project_id, used_date);
CREATE INDEX idx_container_exp_policies_on_project_id_next_run_at ON container_expiration_policies USING btree (project_id, next_run_at) WHERE (enabled = true);
@ -25181,8 +25200,6 @@ CREATE INDEX idx_on_protected_branch ON approval_group_rules_protected_branches
CREATE INDEX idx_open_issues_on_project_and_confidential_and_author_and_id ON issues USING btree (project_id, confidential, author_id, id) WHERE (state_id = 1);
CREATE INDEX idx_p_catalog_resource_component_usages_on_catalog_resource_id ON ONLY p_catalog_resource_component_usages USING btree (catalog_resource_id);
CREATE INDEX idx_packages_debian_group_component_files_on_architecture_id ON packages_debian_group_component_files USING btree (architecture_id);
CREATE INDEX idx_packages_debian_project_component_files_on_architecture_id ON packages_debian_project_component_files USING btree (architecture_id);
@ -25691,6 +25708,8 @@ CREATE INDEX index_catalog_resource_versions_on_resource_id_and_released_at ON c
CREATE INDEX index_catalog_resources_on_last_30_day_usage_count ON catalog_resources USING btree (last_30_day_usage_count) WHERE (state = 1);
CREATE INDEX index_catalog_resources_on_last_30_day_usage_count_updated_at ON catalog_resources USING btree (last_30_day_usage_count_updated_at);
CREATE UNIQUE INDEX index_catalog_resources_on_project_id ON catalog_resources USING btree (project_id);
CREATE INDEX index_catalog_resources_on_search_vector ON catalog_resources USING gin (search_vector);
@ -28917,6 +28936,8 @@ CREATE UNIQUE INDEX index_vulnerability_identifiers_on_project_id_and_fingerprin
CREATE INDEX index_vulnerability_issue_links_on_issue_id ON vulnerability_issue_links USING btree (issue_id);
CREATE INDEX index_vulnerability_issue_links_on_project_id ON vulnerability_issue_links USING btree (project_id);
CREATE INDEX index_vulnerability_merge_request_links_on_merge_request_id ON vulnerability_merge_request_links USING btree (merge_request_id);
CREATE INDEX index_vulnerability_merge_request_links_on_project_id ON vulnerability_merge_request_links USING btree (project_id);
@ -30971,6 +30992,8 @@ CREATE TRIGGER trigger_2514245c7fc5 BEFORE INSERT OR UPDATE ON dast_site_profile
CREATE TRIGGER trigger_25c44c30884f BEFORE INSERT OR UPDATE ON work_item_parent_links FOR EACH ROW EXECUTE FUNCTION trigger_25c44c30884f();
CREATE TRIGGER trigger_25fe4f7da510 BEFORE INSERT OR UPDATE ON vulnerability_issue_links FOR EACH ROW EXECUTE FUNCTION trigger_25fe4f7da510();
CREATE TRIGGER trigger_2ac3d66ed1d3 BEFORE INSERT OR UPDATE ON vulnerability_occurrence_pipelines FOR EACH ROW EXECUTE FUNCTION trigger_2ac3d66ed1d3();
CREATE TRIGGER trigger_2b8fdc9b4a4e BEFORE INSERT OR UPDATE ON ml_experiment_metadata FOR EACH ROW EXECUTE FUNCTION trigger_2b8fdc9b4a4e();
@ -31144,6 +31167,9 @@ ALTER TABLE ONLY sbom_occurrences_vulnerabilities
ALTER TABLE ONLY ai_agent_version_attachments
ADD CONSTRAINT fk_07db0a0e5b FOREIGN KEY (ai_agent_version_id) REFERENCES ai_agent_versions(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_issue_links
ADD CONSTRAINT fk_081e11030b FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY abuse_report_user_mentions
ADD CONSTRAINT fk_088018ecd8 FOREIGN KEY (abuse_report_id) REFERENCES abuse_reports(id) ON DELETE CASCADE;

View File

@ -20,6 +20,7 @@ For more information, see the history.
WARNING:
This feature is considered [experimental](../../policy/experiment-beta-support.md) and is not intended for customer usage outside of initial design partners. We expect major changes to this feature.
For GitLab 17.1, the feature has a deadline of 31 Aug 2024.
DISCLAIMER:
This page contains information related to upcoming products, features, and functionality.

View File

@ -35244,6 +35244,8 @@ Values for sorting projects.
| <a id="projectsortpath_desc"></a>`PATH_DESC` | Path by descending order. |
| <a id="projectsortstars_asc"></a>`STARS_ASC` | Stars by ascending order. |
| <a id="projectsortstars_desc"></a>`STARS_DESC` | Stars by descending order. |
| <a id="projectsortstorage_size_asc"></a>`STORAGE_SIZE_ASC` | Storage size by ascending order. |
| <a id="projectsortstorage_size_desc"></a>`STORAGE_SIZE_DESC` | Storage size by descending order. |
| <a id="projectsortupdated_asc"></a>`UPDATED_ASC` | Updated at ascending order. |
| <a id="projectsortupdated_desc"></a>`UPDATED_DESC` | Updated at descending order. |
| <a id="projectsortcreated_asc"></a>`created_asc` **{warning-solid}** | **Deprecated** in GitLab 13.5. This was renamed. Use: `CREATED_ASC`. |

View File

@ -1451,6 +1451,53 @@ DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
### List Service Account Users
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/416729) in GitLab 17.1.
Prerequisites:
- You must be an administrator of the self-managed instance, or have the Owner role for the group.
Lists all service account users that are provisioned by the group.
This function takes pagination parameters `page` and `per_page` to restrict the list of users.
```plaintext
GET /groups/:id/service_accounts
```
Example request:
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/345/service_accounts"
```
Supported attributes:
| Attribute | Type | Required | Description |
|:-------------|:---------|:-----------|:----------------------------------------------------------------|
| `order_by` | string | no | Orders list of users by `username` or `id`. Default is `id`. |
| `sort` | string | no | Specifies sorting by `asc` or `desc`. Default is `desc`. |
Example response:
```json
[
{
"id": 57,
"username": "service_account_group_345_<random_hash>",
"name": "Service account user"
},
{
"id": 58,
"username": "service_account_group_345_<random_hash>",
"name": "Service account user"
}
]
```
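For illustration, a request that combines the pagination and sorting parameters described above might look like the following sketch. The group ID, token, and page values are placeholders rather than output from a real instance.

```shell
# Hypothetical example: second page of 20 service account users in group 345,
# ordered by username in ascending order.
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/groups/345/service_accounts?order_by=username&sort=asc&page=2&per_page=20"
```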
### Create Service Account User
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/407775) in GitLab 16.1.

View File

@ -1124,6 +1124,58 @@ Example response:
}
```
## List service account users
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** Self-managed, GitLab Dedicated
> - Ability to list all service account users [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/416729) in GitLab 17.1.
Prerequisites:
- You must be an administrator of the self-managed instance.
Lists all service account users.
This function takes pagination parameters `page` and `per_page` to restrict the list of users.
This API endpoint requires the user to be an instance admin.
Example request:
```plaintext
GET /service_accounts
```
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/service_accounts"
```
Supported attributes:
| Attribute | Type | Required | Description |
|:-------------|:---------|:----------|:------------------------------------------------------------|
| `order_by` | string | no | Orders list of users by `username` or `id`. Default is `id`. |
| `sort` | string | no | Specifies sorting by `asc` or `desc`. Default is `desc`. |
Example response:
```json
[
{
"id": 114,
"username": "service_account_33",
"name": "Service account user"
},
{
"id": 137,
"username": "service_account_34",
"name": "john doe"
}
]
```
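As a sketch, the `order_by`, `sort`, and pagination parameters above can be combined in a single request; the token and page values below are placeholders.

```shell
# Hypothetical example: first page of 50 service account users,
# ordered by id in descending order.
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/service_accounts?order_by=id&sort=desc&page=1&per_page=50"
```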
## List user projects
See the [list of user projects](projects.md#list-user-projects).
@ -2530,7 +2582,7 @@ Example response:
```json
{
"id": 9171,
"token": "glrt-kyahzxLaj4Dc1jQf4xjX",
"token": "<access-token>",
"token_expires_at": null
}
```

View File

@ -275,11 +275,10 @@ funnels must be defined in the pointer project.
1. In the `.gitlab/analytics/` directory, create a directory named `funnels`.
1. In the new `.gitlab/analytics/funnels` directory, create a funnel definition YAML file.
Funnel definitions must include the keys `name` and `seconds_to_convert`, and an array of `steps`.
Funnel definitions must include the key `seconds_to_convert` and an array of `steps`.
| Key | Description |
|----------------------|----------------------------------------------------------|
| `name` | The name of the funnel. |
| `seconds_to_convert` | The number of seconds a user has to complete the funnel. |
| `steps` | An array of funnel steps. |
@ -296,7 +295,6 @@ Each step must include the keys `name`, `target`, and `action`.
The following example defines a funnel that tracks users who completed a purchase within one hour by going through three target pages:
```yaml
name: completed_purchase
seconds_to_convert: 3600
steps:
- name: view_page_1
@ -315,6 +313,13 @@ steps:
You can [query the funnel data with the REST API](../../api/product_analytics.md#send-query-request-to-cube).
To do this, you can use the example query body below, where you need to replace `FUNNEL_NAME` with your funnel's name.
NOTE:
The name of a funnel is generated from the filename of the funnel definition YAML file,
by separating words with underscores and removing special characters.
For example, for a funnel definition file in `.gitlab/analytics/funnels/Successful Conversions.yaml`
the funnel name is `successful_conversions`.
This funnel name can be referenced in visualization definitions.
NOTE:
The `afterDate` filter is not supported. Use `beforeDate` or `inDateRange`.
@ -371,10 +376,6 @@ options:
type: value
```
NOTE:
The funnel name defined in the YAML definition is converted to a slug that can be referenced in visualization definitions.
For example, the funnel name `Successful Conversions` is converted to `successful_conversions`.
When funnel definitions and visualizations are ready, you can [create a custom dashboard](../analytics/analytics_dashboards.md#create-a-custom-dashboard) to visualize funnel analysis behavior.
## Raw data export

View File

@ -51,6 +51,8 @@ Prerequisites:
This service account is associated only with your top-level group.
1. [List all service account users](../../api/groups.md#list-service-account-users).
1. [Create a personal access token](../../api/groups.md#create-personal-access-token-for-service-account-user)
for the service account user.
@ -74,6 +76,8 @@ Prerequisites:
This service account is associated with the entire instance, not a specific group
or project in the instance.
1. [List all service account users](../../api/users.md#list-service-account-users).
1. [Create a personal access token](../../api/users.md#create-a-personal-access-token)
for the service account user.

View File

@ -29,6 +29,10 @@ module Gitlab
desc: "Save logs to a file instead of printing to stdout",
type: :boolean,
default: false
option :fail_on_missing_pods,
desc: "Fail if no pods are found",
type: :boolean,
default: true
def pods(name = "")
logs = kubeclient.pod_logs(name.split(","), since: options[:since], containers: options[:containers])
@ -45,6 +49,12 @@ module Gitlab
log("Logs for pod '#{pod_name}'", :success)
puts pod_logs
end
rescue Kubectl::Client::Error => e
raise(e) unless ["No pods matched", "No pods found in namespace"].any? { |msg| e.message.include?(msg) }
fail_on_missing_pods = options[:fail_on_missing_pods]
log(e.message, fail_on_missing_pods ? :error : :warn)
exit(1) if fail_on_missing_pods
end
desc "events", "Log cluster events"

View File

@ -48,7 +48,7 @@ module Gitlab
def component_ci_versions
{
"gitlab.gitaly.image.repository" => "#{IMAGE_REPOSITORY}/gitaly",
"gitlab.gitaly.image.tag" => gitaly_version,
"gitlab.gitaly.image.tag" => semver?(gitaly_version) ? "v#{gitaly_version}" : gitaly_version,
"gitlab.gitlab-shell.image.repository" => "#{IMAGE_REPOSITORY}/gitlab-shell",
"gitlab.gitlab-shell.image.tag" => "v#{gitlab_shell_version}",
"gitlab.migrations.image.repository" => "#{IMAGE_REPOSITORY}/gitlab-toolbox-ee",
@ -65,6 +65,16 @@ module Gitlab
"gitlab.webservice.workhorse.tag" => commit_sha
}
end
private
# Semver compatible version
#
# @param [String] version
# @return [Boolean]
def semver?(version)
version.match?(/^[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?(-ee)?$/)
end
end
end
end

View File

@ -1 +0,0 @@
7aa06a578d76bdc294ee8e9acb4f063e7d9f1d5f

View File

@ -1 +0,0 @@
14.35.0

View File

@ -51,6 +51,26 @@ RSpec.describe Gitlab::Cng::Commands::Log do
expect(File).to have_received(:write).with("pod-1.log", "log-1")
expect(File).to have_received(:write).with("pod-2.log", "log-2")
end
it "raises error when no pod is found" do
allow(kubeclient).to receive(:pod_logs).and_raise(
Gitlab::Cng::Kubectl::Client::Error, "No pods found in namespace 'gitlab'"
)
expect do
expect { invoke_command(command_name) }.to output(/No pods found in namespace 'gitlab'/).to_stdout
end.to raise_error(SystemExit)
end
it "prints warning with --no-fail-on-missing-pods argument" do
allow(kubeclient).to receive(:pod_logs).and_raise(
Gitlab::Cng::Kubectl::Client::Error, "No pods found in namespace 'gitlab'"
)
expect do
invoke_command(command_name, [], { fail_on_missing_pods: false })
end.to output(/No pods found in namespace 'gitlab'/).to_stdout
end
end
describe "events command" do

View File

@ -0,0 +1,84 @@
# frozen_string_literal: true
RSpec.describe Gitlab::Cng::Deployment::DefaultValues do
let(:ci_project_dir) { "/builds/dir" }
let(:ci_commit_sha) { "0acb5ee6db0860436fafc2c31a2cd87849c51aa3" }
let(:ci_short_sha) { "0acb5ee6db08" }
let(:image_repository) { "registry.gitlab.com/gitlab-org/build/cng-mirror" }
let(:gitaly_version) { "7aa06a578d76bdc294ee8e9acb4f063e7d9f1d5f" }
let(:shell_version) { "14.0.5" }
let(:env) do
{
"CI_PROJECT_DIR" => ci_project_dir,
"CI_COMMIT_SHA" => ci_commit_sha,
"CI_COMMIT_SHORT_SHA" => ci_short_sha
}
end
before do
described_class.instance_variable_set(:@ci_project_dir, nil)
described_class.instance_variable_set(:@gitaly_version, nil)
allow(File).to receive(:read).with(File.join(ci_project_dir, "GITALY_SERVER_VERSION")).and_return(gitaly_version)
allow(File).to receive(:read).with(File.join(ci_project_dir, "GITLAB_SHELL_VERSION")).and_return(shell_version)
end
around do |example|
ClimateControl.modify(env) { example.run }
end
it "returns correct common values" do
expect(described_class.common_values("domain")).to eq({
global: {
hosts: {
domain: "domain",
https: false
},
ingress: {
configureCertmanager: false,
tls: {
enabled: false
}
},
appConfig: {
applicationSettingsCacheSeconds: 0
}
},
gitlab: { "gitlab-exporter": { enabled: false } },
redis: { metrics: { enabled: false } },
prometheus: { install: false },
certmanager: { install: false },
"gitlab-runner": { install: false }
})
end
it "returns correct ci components" do
expect(described_class.component_ci_versions).to eq({
"gitlab.gitaly.image.repository" => "#{image_repository}/gitaly",
"gitlab.gitaly.image.tag" => gitaly_version,
"gitlab.gitlab-shell.image.repository" => "#{image_repository}/gitlab-shell",
"gitlab.gitlab-shell.image.tag" => "v#{shell_version}",
"gitlab.migrations.image.repository" => "#{image_repository}/gitlab-toolbox-ee",
"gitlab.migrations.image.tag" => ci_commit_sha,
"gitlab.toolbox.image.repository" => "#{image_repository}/gitlab-toolbox-ee",
"gitlab.toolbox.image.tag" => ci_commit_sha,
"gitlab.sidekiq.annotations.commit" => ci_short_sha,
"gitlab.sidekiq.image.repository" => "#{image_repository}/gitlab-sidekiq-ee",
"gitlab.sidekiq.image.tag" => ci_commit_sha,
"gitlab.webservice.annotations.commit" => ci_short_sha,
"gitlab.webservice.image.repository" => "#{image_repository}/gitlab-webservice-ee",
"gitlab.webservice.image.tag" => ci_commit_sha,
"gitlab.webservice.workhorse.image" => "#{image_repository}/gitlab-workhorse-ee",
"gitlab.webservice.workhorse.tag" => ci_commit_sha
})
end
context "with semver gitaly version" do
let(:gitaly_version) { "17.0.1" }
it "correctly sets gitaly image tag" do
expect(described_class.component_ci_versions["gitlab.gitaly.image.tag"]).to eq("v#{gitaly_version}")
end
end
end

View File

@ -38,44 +38,18 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
)
end
let(:env) do
{
"QA_EE_LICENSE" => "license",
"CI_PROJECT_DIR" => File.expand_path("../../../../fixture", __dir__),
"CI_COMMIT_SHA" => "0acb5ee6db0860436fafc2c31a2cd87849c51aa3",
"CI_COMMIT_SHORT_SHA" => "0acb5ee6db08"
}
end
let(:values_yml) do
let(:expected_values_yml) do
{
global: {
hosts: {
domain: gitlab_domain,
https: false
},
ingress: {
configureCertmanager: false,
tls: {
enabled: false
}
},
appConfig: {
applicationSettingsCacheSeconds: 0
},
common: "val",
extraEnv: {
GITLAB_LICENSE_MODE: "test",
CUSTOMER_PORTAL_URL: "https://customers.staging.gitlab.com"
}
},
gitlab: {
"gitlab-exporter": { enabled: false },
license: { secret: "gitlab-license" }
},
redis: { metrics: { enabled: false } },
prometheus: { install: false },
certmanager: { install: false },
"gitlab-runner": { install: false },
**config_values
}.deep_stringify_keys.to_yaml
end
@ -85,12 +59,15 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
allow(Gitlab::Cng::Kubectl::Client).to receive(:new).with("gitlab").and_return(kubeclient)
allow(Gitlab::Cng::Helm::Client).to receive(:new).and_return(helmclient)
allow(Gitlab::Cng::Deployment::Configurations::Kind).to receive(:new).and_return(configuration)
allow(Gitlab::Cng::Deployment::DefaultValues).to receive(:common_values).with(gitlab_domain).and_return({
global: { common: "val" }
})
allow(installation).to receive(:execute_shell)
end
around do |example|
ClimateControl.modify(env) { example.run }
ClimateControl.modify({ "QA_EE_LICENSE" => "license" }) { example.run }
end
context "without ci" do
@ -105,7 +82,7 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
chart_reference,
namespace: "gitlab",
timeout: "10m",
values: values_yml,
values: expected_values_yml,
args: []
)
@ -121,6 +98,11 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
context "with ci and specific sha" do
let(:ci) { true }
let(:chart_sha) { "sha" }
let(:ci_components) { { "gitlab.gitaly.image.repository" => "repo", "gitlab.gitaly.image.tag" => "tag" } }
before do
allow(Gitlab::Cng::Deployment::DefaultValues).to receive(:component_ci_versions).and_return(ci_components)
end
it "runs helm install with correctly merged values and component versions" do
expect { installation.create }.to output(/Creating CNG deployment 'gitlab'/).to_stdout
@ -131,27 +113,8 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
chart_reference,
namespace: "gitlab",
timeout: "10m",
values: values_yml,
# rubocop:disable Layout/LineLength -- fitting the args in to 120 would make the definition quite unreadable
args: %W[
--set gitlab.gitaly.image.repository=registry.gitlab.com/gitlab-org/build/cng-mirror/gitaly
--set gitlab.gitaly.image.tag=7aa06a578d76bdc294ee8e9acb4f063e7d9f1d5f
--set gitlab.gitlab-shell.image.repository=registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-shell
--set gitlab.gitlab-shell.image.tag=v14.35.0
--set gitlab.migrations.image.repository=registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-toolbox-ee
--set gitlab.migrations.image.tag=#{env['CI_COMMIT_SHA']}
--set gitlab.toolbox.image.repository=registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-toolbox-ee
--set gitlab.toolbox.image.tag=#{env['CI_COMMIT_SHA']}
--set gitlab.sidekiq.annotations.commit=#{env['CI_COMMIT_SHORT_SHA']}
--set gitlab.sidekiq.image.repository=registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-sidekiq-ee
--set gitlab.sidekiq.image.tag=#{env['CI_COMMIT_SHA']}
--set gitlab.webservice.annotations.commit=#{env['CI_COMMIT_SHORT_SHA']}
--set gitlab.webservice.image.repository=registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-webservice-ee
--set gitlab.webservice.image.tag=#{env['CI_COMMIT_SHA']}
--set gitlab.webservice.workhorse.image=registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-workhorse-ee
--set gitlab.webservice.workhorse.tag=#{env['CI_COMMIT_SHA']}
]
# rubocop:enable Layout/LineLength
values: expected_values_yml,
args: ci_components.flat_map { |k, v| ["--set", "#{k}=#{v}"] }
)
end
end

View File

@ -194,8 +194,8 @@ module Keeps
[Flaky tests management process](https://handbook.gitlab.com/handbook/engineering/infrastructure/engineering-productivity/flaky-tests-management-and-processes/#flaky-tests-management-process)
to help us increase `master` stability.
Please let us know your feedback
[in the dedicated issue](https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/447).
Please let us know your feedback in the
[Engineering Productivity issue tracker](https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues).
Related to #{flaky_issue['web_url']}.
MARKDOWN

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillVulnerabilityIssueLinksProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_vulnerability_issue_links_project_id
feature_category :vulnerability_management
end
end
end

View File

@ -7,30 +7,38 @@ module Gitlab
# Component usage is defined as the number of unique `used_by_project_id`s in the table
# `p_catalog_resource_component_usages` for a given scope.
#
# This aggregator iterates through the target scope in batches. For each target ID, it collects
# the usage count using `distinct_each_batch` for the given usage window. Since this process can
# be interrupted when it reaches MAX_RUNTIME, we utilize a Redis cursor so the aggregator can
# resume from where it left off on each run. We collect the count in Rails because the SQL query
# `COUNT(DISTINCT(*))` is not performant when the dataset is large.
# This aggregator is intended to be run in a scheduled cron job. It implements a "continue later"
# mechanism with a Redis cursor, which enables the work to continue from where it was last interrupted
# on each run. It iterates through the target table in batches, in order of ID ascending. For each
# target ID, it collects the usage count using `distinct_each_batch` for the given usage window.
# We collect the count in Rails because the SQL query `COUNT(DISTINCT(*))` is not performant when the
# data volume is large.
#
# RUNTIME: The actual total runtime will be slightly longer than MAX_RUNTIME because
# RUNTIME: The actual total runtime will be longer than MAX_RUNTIME because
# it depends on the execution time of `&usage_counts_block`.
# EXCLUSIVE LEASE: This aggregator is protected from parallel processing with an exclusive lease guard.
# WORKER: The worker running this service should be scheduled at the same cadence as MAX_RUNTIME, with:
# deduplicate :until_executed, if_deduplicated: :reschedule_once, ttl: LEASE_TIMEOUT
# deduplicate :until_executed, if_deduplicated: :reschedule_once, ttl: WORKER_DEDUP_TTL
# STOPPING: When the aggregator's cursor advances past the max target_id, it resets to 0. This means
# it may reprocess targets that have already been processed for the given usage window.
# To minimize redundant reprocessing, you should prevent the aggregator from running once it
# meets a certain stop condition (e.g. when all targets have been marked as "processed").
#
##### Usage
#
# each_batch:
# - Yields each batch of `usage_counts` to the given block.
# - The block should be able to handle targets that might be reprocessed multiple times.
# - Yields each batch of `usage_counts` to the given block. The block should:
# - Be able to handle targets that might be reprocessed multiple times.
# - Not exceed 1 minute in execution time.
# - `usage_counts` format: { target_object1 => 100, target_object2 => 200, ... }
# - If the lease is obtained, returns a Result containing the `cursor` object and
# `total_targets_completed`. Otherwise, returns nil.
# - If the lease is obtained, returns a Result containing `total_targets_completed` and
# `cursor_attributes`. Otherwise, returns nil.
#
# Example:
# return if done_processing?
#
# aggregator = Gitlab::Ci::Components::Usages::Aggregator.new(
# target_scope: Ci::Catalog::Resource.scope_to_get_only_unprocessed_targets,
# target_model: Ci::Catalog::Resource,
# group_by_column: :catalog_resource_id,
# usage_start_date: Date.today - 30.days,
# usage_end_date: Date.today - 1.day,
@ -43,37 +51,32 @@ module Gitlab
#
##### Parameters
#
# target_scope:
# - ActiveRecord relation to retrieve the target IDs. Processed in order of ID ascending.
# - The target model class should have `include EachBatch`.
# - When cursor.target_id gets reset to 0, the aggregator may reprocess targets that have
# already been processed for the given usage window. To minimize redundant reprocessing,
# add a limiting condition to the target scope so it only retrieves unprocessed targets.
# group_by_column: This should be the usage table's foreign key of the target_scope.
# target_model: Target model to iterate through. Model class should contain `include EachBatch`.
# group_by_column: This should be the usage table's foreign key of the target_model.
# usage_start_date & usage_end_date: Date objects specifying the window of usage data to aggregate.
# lease_key: Used for obtaining an exclusive lease. Also used as part of the cursor Redis key.
#
# rubocop: disable CodeReuse/ActiveRecord -- Custom queries required for data processing
class Aggregator
include Gitlab::Utils::StrongMemoize
include ExclusiveLeaseGuard
Result = Struct.new(:cursor, :total_targets_completed, keyword_init: true)
Result = Struct.new(:total_targets_completed, :cursor_attributes, keyword_init: true)
TARGET_BATCH_SIZE = 1000
DISTINCT_USAGE_BATCH_SIZE = 100
MAX_RUNTIME = 4.minutes # Should be >= job scheduling frequency so there is no gap between job runs
LEASE_TIMEOUT = 5.minutes # Should be MAX_RUNTIME + extra time to execute `&usage_counts_block`
WORKER_DEDUP_TTL = MAX_RUNTIME + 1.minute # Includes extra time to execute `&usage_counts_block`
LEASE_TIMEOUT = 10.minutes
def initialize(target_scope:, group_by_column:, usage_start_date:, usage_end_date:, lease_key:)
@target_scope = target_scope
def initialize(target_model:, group_by_column:, usage_start_date:, usage_end_date:, lease_key:)
@target_model = target_model
@group_by_column = group_by_column
@lease_key = lease_key # Used by ExclusiveLeaseGuard
@runtime_limiter = Gitlab::Metrics::RuntimeLimiter.new(MAX_RUNTIME)
@cursor = Aggregators::Cursor.new(
redis_key: "#{lease_key}:cursor",
target_scope: target_scope,
target_model: target_model,
usage_window: Aggregators::Cursor::Window.new(usage_start_date, usage_end_date)
)
end
@ -82,17 +85,18 @@ module Gitlab
try_obtain_lease do
total_targets_completed = process_targets(&usage_counts_block)
Result.new(cursor: cursor, total_targets_completed: total_targets_completed)
Result.new(total_targets_completed: total_targets_completed, cursor_attributes: cursor.attributes)
end
end
private
attr_reader :target_scope, :group_by_column, :cursor, :runtime_limiter
attr_reader :target_model, :group_by_column, :cursor, :runtime_limiter
def process_targets
# Restore the scope from cursor so we can resume from the last run
restored_target_scope = target_scope.where('id >= ?', cursor.target_id)
# Restore the scope from cursor so we can resume from the last run. `cursor.target_id` is 0
# when the Redis cursor is first initialized or when it advances past the max target ID.
restored_target_scope = target_model.where('id >= ?', cursor.target_id)
total_targets_completed = 0
restored_target_scope.each_batch(of: TARGET_BATCH_SIZE) do |targets_relation|
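Taken together, the interface documented above can be exercised roughly as follows. This is a sketch only; the block body (the column it writes to, the service it would live in) and the lease key are assumptions for illustration, not part of this change:

aggregator = Gitlab::Ci::Components::Usages::Aggregator.new(
  target_model: Ci::Catalog::Resource,
  group_by_column: :catalog_resource_id,
  usage_start_date: Date.today - 30.days,
  usage_end_date: Date.today - 1.day,
  lease_key: 'ci_components_usage_aggregation' # illustrative lease key
)

result = aggregator.each_batch do |usage_counts|
  # usage_counts => { catalog_resource_1 => 100, catalog_resource_2 => 200, ... }
  # The block must tolerate reprocessed targets and stay well under a minute.
  usage_counts.each do |resource, count|
    resource.update!(last_30_day_usage_count: count) # column name is illustrative
  end
end

# nil means the exclusive lease could not be obtained; otherwise a Result struct.
result&.total_targets_completed
result&.cursor_attributes # => { target_id: ..., usage_window: {...}, ..., max_target_id: ... }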

View File

@ -37,9 +37,9 @@ module Gitlab
alias_method :interrupted?, :interrupted
def initialize(redis_key:, target_scope:, usage_window:)
def initialize(redis_key:, target_model:, usage_window:)
@redis_key = redis_key
@target_scope = target_scope
@target_model = target_model
@usage_window = usage_window
@interrupted = false
@ -65,26 +65,22 @@ module Gitlab
def attributes
{
target_id: target_id,
usage_window: usage_window,
usage_window: usage_window.to_h,
last_used_by_project_id: last_used_by_project_id,
last_usage_count: last_usage_count
last_usage_count: last_usage_count,
max_target_id: max_target_id
}
end
def max_target_id
target_scope.maximum(:id).to_i
end
strong_memoize_attr :max_target_id
def save!
Gitlab::Redis::SharedState.with do |redis|
redis.set(redis_key, attributes.to_json, ex: CURSOR_REDIS_KEY_TTL)
redis.set(redis_key, attributes.except(:max_target_id).to_json, ex: CURSOR_REDIS_KEY_TTL)
end
end
private
attr_reader :redis_key, :target_scope
attr_reader :redis_key, :target_model
def fetch_initial_attributes!
data = Gitlab::Redis::SharedState.with do |redis|
@ -107,6 +103,11 @@ module Gitlab
@last_usage_count = 0
end
def max_target_id
target_model.maximum(:id).to_i
end
strong_memoize_attr :max_target_id
def parse_date(date_str)
Date.parse(date_str) if date_str
end
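As a rough sketch of the cursor lifecycle implied by the diff above (the values shown are what a fresh Redis key would be expected to yield, and are assumptions for illustration):

window = Gitlab::Ci::Components::Usages::Aggregators::Cursor::Window.new(
  Date.parse('2024-01-08'), Date.parse('2024-01-14')
)

cursor = Gitlab::Ci::Components::Usages::Aggregators::Cursor.new(
  redis_key: 'my_lease_key:cursor',
  target_model: Ci::Catalog::Resource,
  usage_window: window
)

cursor.attributes
# => { target_id: 0, usage_window: window.to_h, last_used_by_project_id: 0,
#      last_usage_count: 0, max_target_id: Ci::Catalog::Resource.maximum(:id).to_i }

cursor.save! # persists everything except :max_target_id, which is recomputed on each run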

View File

@ -29,6 +29,11 @@ module Gitlab
# values. Enums/state fields must be translated into their underlying
# representations, for example, and no hooks will be called.
#
# This tool does not support all column types. For example,
# ActiveModel::Type.lookup(column.type) throws an exception when
# the column type is `:timestamptz` (timestamp with time zone).
#
#
module BulkUpdate
LIST_SEPARATOR = ', '
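The limitation described in the comment above can be illustrated with a short, hypothetical snippet (not part of this change):

# Supported column types resolve to an ActiveModel type object...
ActiveModel::Type.lookup(:string)      # => an ActiveModel::Type::String instance

# ...whereas Postgres-specific types such as :timestamptz are not registered with
# ActiveModel's registry, so the lookup raises instead of returning a type object.
ActiveModel::Type.lookup(:timestamptz) # raises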

lib/search/settings.rb Normal file
View File

@ -0,0 +1,80 @@
# frozen_string_literal: true
module Search
class Settings
include Rails.application.routes.url_helpers
def for_project(project)
project_general_settings(project).concat(
project_repository_settings(project),
project_merge_request_settings(project),
project_ci_cd_settings(project),
project_monitor_settings(project)
)
end
def project_general_settings(project)
[
{ text: _("Naming, topics, avatar"), href: edit_project_path(project, anchor: 'js-general-settings') },
{ text: _("Visibility, project features, permissions"),
href: edit_project_path(project, anchor: 'js-shared-permissions') },
{ text: _("Badges"), href: edit_project_path(project, anchor: 'js-badges-settings') },
{ text: _("Service Desk"), href: edit_project_path(project, anchor: 'js-service-desk') },
{ text: _("Advanced"), href: edit_project_path(project, anchor: 'js-project-advanced-settings') }
]
end
def project_repository_settings(project)
[
{ text: _("Branch defaults"),
href: project_settings_repository_path(project, anchor: 'branch-defaults-settings') },
{ text: _("Branch rules"), href: project_settings_repository_path(project, anchor: 'branch-rules') },
{ text: _("Mirroring repositories"),
href: project_settings_repository_path(project, anchor: 'js-push-remote-settings') },
{ text: s_('DeployTokens|Deploy tokens'),
href: project_settings_repository_path(project, anchor: 'js-deploy-tokens') },
{ text: _("Deploy keys"),
href: project_settings_repository_path(project, anchor: 'js-deploy-keys-settings') },
{ text: _("Repository cleanup"), href: project_settings_repository_path(project, anchor: 'cleanup') }
]
end
def project_merge_request_settings(project)
[
{ text: _("Merge requests"),
href: project_settings_merge_requests_path(project, anchor: 'js-merge-request-settings') }
]
end
def project_ci_cd_settings(project)
[
{ text: _("General pipelines"),
href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings') },
{ text: _("Auto DevOps"), href: project_settings_ci_cd_path(project, anchor: 'autodevops-settings') },
{ text: _("Runners"), href: project_settings_ci_cd_path(project, anchor: 'js-runners-settings') },
{ text: _("Artifacts"), href: project_settings_ci_cd_path(project, anchor: 'js-artifacts-settings') },
{ text: _("Variables"), href: project_settings_ci_cd_path(project, anchor: 'js-cicd-variables-settings') },
{ text: _("Pipeline trigger tokens"),
href: project_settings_ci_cd_path(project, anchor: 'js-pipeline-triggers') },
{ text: _("Deploy freezes"),
href: project_settings_ci_cd_path(project, anchor: 'js-deploy-freeze-settings') },
{ text: _("Token Access"), href: project_settings_ci_cd_path(project, anchor: 'js-token-access') },
{ text: _("Secure Files"),
href: project_settings_ci_cd_path(project, anchor: 'js-secure-files') }
]
end
def project_monitor_settings(project)
[
{ text: _("Error tracking"),
href: project_settings_operations_path(project, anchor: 'js-error-tracking-settings') },
{ text: _("Alerts"),
href: project_settings_operations_path(project, anchor: 'js-alert-management-settings') },
{ text: _("Incidents"),
href: project_settings_operations_path(project, anchor: 'incident-management-settings') }
]
end
end
end
Search::Settings.prepend_mod
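A minimal sketch of how this class might be consumed (the calling context and the rendered href are assumptions for illustration):

settings = Search::Settings.new.for_project(project)

settings.first
# => { text: "Naming, topics, avatar",
#      href: "/<namespace>/<project>/edit#js-general-settings" }

# Each entry is a { text:, href: } pair pointing at an anchored settings section,
# which is what the /search/settings endpoint exercised by the request spec later
# in this commit serializes as JSON.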

View File

@ -12931,6 +12931,9 @@ msgstr ""
msgid "CommandPalette|Project files"
msgstr ""
msgid "CommandPalette|Settings"
msgstr ""
msgid "CommandPalette|Type %{commandHandle} for command, %{userHandle} for user, %{projectHandle} for project, %{pathHandle} for project file, or perform generic search..."
msgstr ""
@ -25475,16 +25478,16 @@ msgstr ""
msgid "GroupSettings|Security policy management"
msgstr ""
msgid "GroupSettings|Select a project"
msgstr ""
msgid "GroupSettings|Select a subgroup to use as a source of custom templates for new projects in this group. %{link_start}Learn more%{link_end}."
msgstr ""
msgid "GroupSettings|Select parent group"
msgstr ""
msgid "GroupSettings|Select the project containing Analytics Dashboards configuration files"
msgstr ""
msgid "GroupSettings|Select the project containing Analytics Dashboards configuration files."
msgid "GroupSettings|Select the project containing %{link_start}Analytics Dashboards%{link_end} configuration files."
msgstr ""
msgid "GroupSettings|Select the project containing the %{code_start}.gitlab/insights.yml%{code_end} file"
@ -41572,7 +41575,7 @@ msgstr ""
msgid "ProjectSettings|Select the default branch for this project, and configure the template for branch names."
msgstr ""
msgid "ProjectSettings|Select the project containing Analytics Dashboards configuration files."
msgid "ProjectSettings|Select the project containing %{link_start}Analytics Dashboards%{link_end} configuration files."
msgstr ""
msgid "ProjectSettings|Set the default behavior of this option in merge requests. Changes to this are also applied to existing merge requests."
@ -41728,7 +41731,7 @@ msgstr ""
msgid "ProjectSettings|With GitLab Pages you can host your static websites on GitLab. GitLab Pages uses a caching mechanism for efficiency. Your changes may not take effect until that cache is invalidated, which usually takes less than a minute."
msgstr ""
msgid "ProjectSettings|Your project is set up. %{linkStart}View instrumentation instructions%{linkEnd}."
msgid "ProjectSettings|Your project is set up. %{instructionsLinkStart}View instrumentation instructions%{instructionsLinkEnd} and %{dashboardsLinkStart}Analytics Dashboards%{dashboardsLinkEnd}."
msgstr ""
msgid "ProjectSetting|already in use"
@ -59517,6 +59520,9 @@ msgstr ""
msgid "Wiki|Pages"
msgstr ""
msgid "Wiki|Search pages"
msgstr ""
msgid "Wiki|Sidebar was successfully created."
msgstr ""

View File

@ -38,8 +38,8 @@ FactoryBot.define do
contacted_at { Time.now }
end
trait :instance do
runner_type { :instance_type }
trait :offline do
contacted_at { Ci::Runner.online_contact_time_deadline }
end
trait :unregistered do
@ -47,6 +47,29 @@ FactoryBot.define do
creation_state { :started }
end
trait :stale do
after(:build) do |runner, evaluator|
if evaluator.uncached_contacted_at.nil? && evaluator.creation_state == :finished
# Set stale contacted_at value unless this is an `:unregistered` runner
runner.contacted_at = Ci::Runner.stale_deadline
end
runner.created_at = [runner.created_at, runner.uncached_contacted_at, Ci::Runner.stale_deadline].compact.min
end
end
trait :contacted_within_stale_deadline do
contacted_at { 1.second.after(Ci::Runner.stale_deadline) }
end
trait :created_within_stale_deadline do
created_at { 1.second.after(Ci::Runner.stale_deadline) }
end
trait :instance do
runner_type { :instance_type }
end
trait :group do
runner_type { :group_type }
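For reference, these traits compose in specs roughly as follows (mirroring the spec updates later in this commit; a sketch only):

create(:ci_runner, :instance, :online)    # contacted just now
create(:ci_runner, :instance, :offline)   # contacted_at set to the online contact deadline
create(:ci_runner, :instance, :stale)     # created/contacted at or before the stale deadline
build(:ci_runner, :unregistered, :stale)  # never contacted; created long enough ago to be stale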

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe "Admin Runners", feature_category: :fleet_visibility do
RSpec.describe "Admin Runners", :freeze_time, feature_category: :fleet_visibility do
include Features::SortingHelpers
include Features::RunnersHelpers
include Spec::Support::Helpers::ModalHelpers
@ -61,9 +61,9 @@ RSpec.describe "Admin Runners", feature_category: :fleet_visibility do
context "with multiple runners" do
before do
create(:ci_runner, :instance, created_at: 1.year.ago, contacted_at: Time.zone.now)
create(:ci_runner, :instance, created_at: 1.year.ago, contacted_at: 1.day.ago)
create(:ci_runner, :instance, created_at: 1.year.ago, contacted_at: 1.week.ago)
create(:ci_runner, :instance, :online)
create(:ci_runner, :instance, :offline)
create(:ci_runner, :instance, :stale)
visit admin_runners_path
end
@ -82,9 +82,7 @@ RSpec.describe "Admin Runners", feature_category: :fleet_visibility do
it 'shows a job count' do
runner = create(:ci_runner, :project, projects: [project])
create(:ci_build, runner: runner)
create(:ci_build, runner: runner)
create_list(:ci_build, 2, runner: runner)
visit admin_runners_path
@ -262,9 +260,9 @@ RSpec.describe "Admin Runners", feature_category: :fleet_visibility do
end
before_all do
create(:ci_runner, :instance, description: 'runner-1', contacted_at: Time.zone.now)
create(:ci_runner, :instance, description: 'runner-2', contacted_at: Time.zone.now)
create(:ci_runner, :instance, description: 'runner-offline', contacted_at: 1.week.ago)
create(:ci_runner, :instance, :online, description: 'runner-1')
create(:ci_runner, :instance, :online, description: 'runner-2')
create(:ci_runner, :instance, :contacted_within_stale_deadline, description: 'runner-offline')
end
before do

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe "Group Runners", feature_category: :fleet_visibility do
RSpec.describe "Group Runners", :freeze_time, feature_category: :fleet_visibility do
include Features::RunnersHelpers
include Spec::Support::Helpers::ModalHelpers
@ -33,7 +33,7 @@ RSpec.describe "Group Runners", feature_category: :fleet_visibility do
context "with an online group runner" do
let_it_be(:group_runner) do
create(:ci_runner, :group, groups: [group], description: 'runner-foo', contacted_at: Time.zone.now)
create(:ci_runner, :group, :online, groups: [group], description: 'runner-foo')
end
before do
@ -62,7 +62,7 @@ RSpec.describe "Group Runners", feature_category: :fleet_visibility do
context "with an online project runner" do
let_it_be(:project_runner) do
create(:ci_runner, :project, projects: [project], description: 'runner-bar', contacted_at: Time.zone.now)
create(:ci_runner, :project, :online, projects: [project], description: 'runner-bar')
end
before do
@ -83,7 +83,7 @@ RSpec.describe "Group Runners", feature_category: :fleet_visibility do
context "with an online instance runner" do
let_it_be(:instance_runner) do
create(:ci_runner, :instance, description: 'runner-baz', contacted_at: Time.zone.now)
create(:ci_runner, :instance, :online, description: 'runner-baz')
end
before do
@ -140,7 +140,7 @@ RSpec.describe "Group Runners", feature_category: :fleet_visibility do
context "with an online group runner" do
let_it_be(:group_runner) do
create(:ci_runner, :group, groups: [group], description: 'runner-foo', contacted_at: Time.zone.now)
create(:ci_runner, :group, :online, groups: [group], description: 'runner-foo')
end
before do
@ -160,7 +160,7 @@ RSpec.describe "Group Runners", feature_category: :fleet_visibility do
context "with an online project runner" do
let_it_be(:project_runner) do
create(:ci_runner, :project, projects: [project], description: 'runner-bar', contacted_at: Time.zone.now)
create(:ci_runner, :project, :online, projects: [project], description: 'runner-bar')
end
before do

View File

@ -97,7 +97,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
wait_for_requests
within '.js-error-tracking-settings' do
within '#js-error-tracking-settings' do
click_button('Expand')
choose('cloud-hosted Sentry')
end
@ -142,7 +142,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
wait_for_requests
within '.js-error-tracking-settings' do
within '#js-error-tracking-settings' do
click_button('Expand')
choose('cloud-hosted Sentry')
check('Active')
@ -163,13 +163,13 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
wait_for_requests
within '.js-error-tracking-settings' do
within '#js-error-tracking-settings' do
click_button('Expand')
end
expect(page).to have_content('Error tracking backend')
within '.js-error-tracking-settings' do
within '#js-error-tracking-settings' do
check('Active')
choose('GitLab')
end
@ -182,7 +182,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
assert_text('Your changes have been saved')
within '.js-error-tracking-settings' do
within '#js-error-tracking-settings' do
click_button('Expand')
end

View File

@ -320,7 +320,7 @@ RSpec.describe Ci::RunnersFinder, feature_category: :fleet_visibility do
let_it_be(:project_6) { create(:project, group: sub_group_4) }
let_it_be(:runner_instance) { create(:ci_runner, :instance, contacted_at: 13.minutes.ago) }
let_it_be(:runner_group) { create(:ci_runner, :group, contacted_at: 12.minutes.ago, groups: [group]) }
let_it_be(:runner_sub_group_1) { create(:ci_runner, :group, active: false, contacted_at: 11.minutes.ago, groups: [sub_group_1]) }
let_it_be(:runner_sub_group_1) { create(:ci_runner, :group, :inactive, contacted_at: 11.minutes.ago, groups: [sub_group_1]) }
let_it_be(:runner_sub_group_2) { create(:ci_runner, :group, contacted_at: 10.minutes.ago, groups: [sub_group_2]) }
let_it_be(:runner_sub_group_3) { create(:ci_runner, :group, contacted_at: 9.minutes.ago, groups: [sub_group_3]) }
let_it_be(:runner_sub_group_4) { create(:ci_runner, :group, contacted_at: 8.minutes.ago, groups: [sub_group_4]) }

View File

@ -6,13 +6,13 @@ RSpec.describe Namespaces::ProjectsFinder, feature_category: :groups_and_project
let_it_be(:current_user) { create(:user) }
let_it_be(:namespace) { create(:group, :public) }
let_it_be(:subgroup) { create(:group, parent: namespace) }
let_it_be(:project_1) { create(:project, :public, group: namespace, path: 'project', name: 'Project') }
let_it_be(:project_2) { create(:project, :public, group: namespace, path: 'test-project', name: 'Test Project') }
let_it_be_with_reload(:project_1) { create(:project, :public, group: namespace, path: 'project', name: 'Project') }
let_it_be_with_reload(:project_2) { create(:project, :public, group: namespace, path: 'test-project', name: 'Test Project') }
let_it_be(:project_3) { create(:project, :public, :issues_disabled, path: 'sub-test-project', group: subgroup, name: 'Sub Test Project') }
let_it_be(:project_4) { create(:project, :public, :merge_requests_disabled, path: 'test-project-2', group: namespace, name: 'Test Project 2') }
let_it_be_with_reload(:project_4) { create(:project, :public, :merge_requests_disabled, path: 'test-project-2', group: namespace, name: 'Test Project 2') }
let_it_be(:project_5) { create(:project, group: subgroup, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
let_it_be(:project_6) { create(:project, group: namespace, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
let_it_be(:project_7) { create(:project, :archived, group: namespace) }
let_it_be_with_reload(:project_6) { create(:project, group: namespace, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
let_it_be_with_reload(:project_7) { create(:project, :archived, group: namespace) }
let(:params) { {} }
@ -155,6 +155,32 @@ RSpec.describe Namespaces::ProjectsFinder, feature_category: :groups_and_project
expect(projects).to eq([project_4, project_1, project_2, project_6, project_7])
end
end
context 'as storage size' do
before do
project_1.statistics.update!(repository_size: 10, packages_size: 0)
project_2.statistics.update!(repository_size: 12, packages_size: 2)
project_4.statistics.update!(repository_size: 11, packages_size: 1)
project_6.statistics.update!(repository_size: 13, packages_size: 3)
project_7.statistics.update!(repository_size: 14, packages_size: 4)
end
context 'in ascending order' do
let(:params) { { sort: :storage_size_asc } }
it 'returns projects sorted by storage size' do
expect(projects).to eq([project_1, project_4, project_2, project_6, project_7])
end
end
context 'in descending order' do
let(:params) { { sort: :storage_size_desc } }
it 'returns projects sorted by storage size' do
expect(projects).to eq([project_7, project_6, project_2, project_4, project_1])
end
end
end
end
end
end

View File

@ -0,0 +1,22 @@
{
"type": [
"object",
"null"
],
"required": [
"id",
"name",
"username"
],
"properties": {
"id": {
"type": "integer"
},
"name": {
"type": "string"
},
"username": {
"type": "string"
}
}
}

View File

@ -0,0 +1,6 @@
{
"type": "array",
"items": {
"$ref": "safe.json"
}
}

View File

@ -1,4 +1,4 @@
import { GlSkeletonLoader } from '@gitlab/ui';
import { GlSkeletonLoader, GlSearchBoxByType } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
@ -46,6 +46,7 @@ describe('pages/shared/wikis/components/wiki_sidebar_entry', () => {
const findAllEntries = () => wrapper.findAllComponents(WikiSidebarEntry);
const findAndMapEntriesToPages = () =>
findAllEntries().wrappers.map((entry) => ({ ...entry.props('page') }));
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
function buildWrapper(props = {}, provide = {}) {
wrapper = shallowMountExtended(WikiSidebarEntries, {
@ -57,7 +58,9 @@ describe('pages/shared/wikis/components/wiki_sidebar_entry', () => {
canCreate: false,
...provide,
},
stubs: {},
stubs: {
GlSearchBoxByType,
},
});
}
@ -171,4 +174,38 @@ describe('pages/shared/wikis/components/wiki_sidebar_entry', () => {
expect(findViewAllPagesButton().attributes('href')).toBe(MOCK_VIEW_ALL_PAGES_PATH);
});
});
describe('when searching for pages', () => {
beforeEach(async () => {
mock.onGet(MOCK_SIDEBAR_PAGES_API).reply(HTTP_STATUS_OK, MOCK_ENTRIES_MORE_THAN_LIMIT);
buildWrapper();
await waitForPromises();
findSearchBox().vm.$emit('input', 'Page 1');
});
it('lists all the filtered search results', () => {
expect(findAndMapEntriesToPages()).toEqual([
{ slug: 'page-1', path: 'path/to/page-1', title: 'Page 1', children: [] },
{ slug: 'page-10', path: 'path/to/page-10', title: 'Page 10', children: [] },
{ slug: 'page-11', path: 'path/to/page-11', title: 'Page 11', children: [] },
{ slug: 'page-12', path: 'path/to/page-12', title: 'Page 12', children: [] },
{ slug: 'page-13', path: 'path/to/page-13', title: 'Page 13', children: [] },
{ slug: 'page-14', path: 'path/to/page-14', title: 'Page 14', children: [] },
{ slug: 'page-15', path: 'path/to/page-15', title: 'Page 15', children: [] },
{ slug: 'page-16', path: 'path/to/page-16', title: 'Page 16', children: [] },
{ slug: 'page-17', path: 'path/to/page-17', title: 'Page 17', children: [] },
]);
});
it('does not display + X more text', () => {
expect(wrapper.text()).not.toMatch(/\+ \d+ more/);
});
it('has a "View all pages" button', () => {
expect(findViewAllPagesButton().exists()).toBe(true);
expect(findViewAllPagesButton().attributes('href')).toBe(MOCK_VIEW_ALL_PAGES_PATH);
});
});
});

View File

@ -50,6 +50,12 @@ describe('pages/shared/wikis/components/wiki_sidebar_entry', () => {
expect(wrapper.findByTestId('wiki-list-create-child-button').exists()).toBe(true);
});
it('highlights the searchTerm in the page title', () => {
buildWrapper({ page: { title: 'Foo', path: '/foo', children: [] }, searchTerm: 'Fo' });
expect(wrapper.html()).toContain('<strong>Fo</strong>o');
});
});
describe('when the page has children', () => {

View File

@ -7,6 +7,7 @@ import {
COMMAND_HANDLE,
USERS_GROUP_TITLE,
PATH_GROUP_TITLE,
SETTINGS_GROUP_TITLE,
USER_HANDLE,
PATH_HANDLE,
PROJECT_HANDLE,
@ -23,18 +24,20 @@ import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { mockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { COMMANDS, LINKS, USERS, FILES } from './mock_data';
import { COMMANDS, LINKS, USERS, FILES, SETTINGS } from './mock_data';
const links = LINKS.reduce(linksReducer, []);
describe('CommandPaletteItems', () => {
let wrapper;
let mockAxios;
const autocompletePath = '/autocomplete';
const settingsPath = '/settings';
const searchContext = { project: { id: 1 }, group: { id: 2 } };
const projectFilesPath = 'project/files/path';
const projectBlobPath = '/blob/main';
const createComponent = (props, options = {}) => {
const createComponent = (props, options = {}, provide = {}) => {
wrapper = shallowMount(CommandPaletteItems, {
propsData: {
handle: COMMAND_HANDLE,
@ -49,9 +52,11 @@ describe('CommandPaletteItems', () => {
commandPaletteCommands: COMMANDS,
commandPaletteLinks: LINKS,
autocompletePath,
settingsPath,
searchContext,
projectFilesPath,
projectBlobPath,
...provide,
},
...options,
});
@ -61,6 +66,11 @@ describe('CommandPaletteItems', () => {
const findGroups = () => wrapper.findAllComponents(GlDisclosureDropdownGroup);
const findLoader = () => wrapper.findComponent(GlLoadingIcon);
beforeEach(() => {
mockAxios = new MockAdapter(axios);
mockAxios.onGet('/settings?project_id=1').reply(HTTP_STATUS_OK, SETTINGS);
});
describe('Commands and links', () => {
it('renders all commands initially', () => {
createComponent();
@ -102,12 +112,6 @@ describe('CommandPaletteItems', () => {
});
describe('Users, issues, and projects', () => {
let mockAxios;
beforeEach(() => {
mockAxios = new MockAdapter(axios);
});
it('should NOT start search by the search query which is less than 3 chars', () => {
jest.spyOn(axios, 'get');
const searchQuery = 'us';
@ -153,12 +157,6 @@ describe('CommandPaletteItems', () => {
});
describe('Project files', () => {
let mockAxios;
beforeEach(() => {
mockAxios = new MockAdapter(axios);
});
it('should request project files on first search', () => {
jest.spyOn(axios, 'get');
const searchQuery = 'gitlab-ci.yml';
@ -231,20 +229,64 @@ describe('CommandPaletteItems', () => {
});
});
describe('Settings search', () => {
describe('when in a project', () => {
it('fetches settings when entering command mode', async () => {
jest.spyOn(axios, 'get');
createComponent({ handle: COMMAND_HANDLE });
await waitForPromises();
expect(axios.get).toHaveBeenCalledTimes(1);
expect(axios.get).toHaveBeenCalledWith('/settings?project_id=1');
});
it('returns settings in group when search changes', async () => {
createComponent({ handle: COMMAND_HANDLE });
await waitForPromises();
wrapper.setProps({ searchQuery: 'ava' });
await waitForPromises();
const groups = findGroups().wrappers.map((x) => x.props('group'));
expect(groups).toEqual([
{
name: SETTINGS_GROUP_TITLE,
items: SETTINGS,
},
]);
});
it('does not fetch settings when in another mode', () => {
jest.spyOn(axios, 'get');
createComponent({ handle: USER_HANDLE });
expect(axios.get).not.toHaveBeenCalled();
});
});
describe('when not in a project', () => {
it('does not fetch settings when entering command mode', () => {
jest.spyOn(axios, 'get');
createComponent(
{ handle: COMMAND_HANDLE },
{},
{ searchContext: { project: { id: null }, group: { id: 2 } } },
);
expect(axios.get).not.toHaveBeenCalled();
});
});
});
describe('Tracking', () => {
let trackingSpy;
let mockAxios;
beforeEach(() => {
trackingSpy = mockTracking(undefined, undefined, jest.spyOn);
mockAxios = new MockAdapter(axios);
createComponent({ attachTo: document.body });
});
afterEach(() => {
mockAxios.restore();
});
it('tracks event immediately', () => {
expect(trackingSpy).toHaveBeenCalledTimes(1);
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'activate_command_palette', {

View File

@ -191,3 +191,5 @@ export const FILES = [
'foo/bar/.gitkeep',
'with space/README.md',
];
export const SETTINGS = [{ text: 'Avatar', href: '/settings/general', anchor: 'avatar' }];

View File

@ -41,7 +41,7 @@ describe('FrequentlyVisitedItem', () => {
projectId: mockItem.id,
projectName: mockItem.title,
projectAvatarUrl: mockItem.avatar,
size: 24,
size: 32,
});
});

View File

@ -12,6 +12,7 @@ RSpec.describe GitlabSchema.types['ProjectSort'], feature_category: :groups_and_
*%w[
ID_ASC ID_DESC LATEST_ACTIVITY_ASC LATEST_ACTIVITY_DESC
NAME_ASC NAME_DESC PATH_ASC PATH_DESC STARS_ASC STARS_DESC
STORAGE_SIZE_ASC STORAGE_SIZE_DESC
]
)
end

View File

@ -21,18 +21,18 @@ RSpec.describe Ci::RunnersHelper, feature_category: :fleet_visibility do
end
it "returns offline text" do
runner = create(:ci_runner, contacted_at: 1.day.ago)
runner = create(:ci_runner, :offline)
expect(helper.runner_status_icon(runner)).to include("is offline")
end
it "returns stale text" do
runner = create(:ci_runner, created_at: 4.months.ago, contacted_at: 4.months.ago)
runner = create(:ci_runner, :stale)
expect(helper.runner_status_icon(runner)).to include("is stale")
expect(helper.runner_status_icon(runner)).to include("last contact was")
end
it "returns stale text, when runner never contacted" do
runner = create(:ci_runner, :unregistered, created_at: 4.months.ago)
runner = create(:ci_runner, :unregistered, :stale)
expect(helper.runner_status_icon(runner)).to include("is stale")
expect(helper.runner_status_icon(runner)).to include("never contacted")
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillVulnerabilityIssueLinksProjectId,
feature_category: :vulnerability_management,
schema: 20240613064358 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :vulnerability_issue_links }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :vulnerabilities }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :vulnerability_id }
end
end

View File

@ -7,8 +7,11 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
let_it_be(:usage_start_date) { Date.today - 30.days }
let_it_be(:usage_end_date) { Date.today - 1.day }
let_it_be(:resources) { create_list(:ci_catalog_resource, 5).sort_by(&:id) }
let_it_be(:expected_usage_counts) { resources.zip([3, 17, 0, 1, 26]).to_h }
let(:usage_model) { Ci::Catalog::Resources::Components::Usage }
let(:target_scope) { Ci::Catalog::Resource }
let(:target_model) { Ci::Catalog::Resource }
let(:group_by_column) { :catalog_resource_id }
let(:lease_key) { 'my_lease_key' }
@ -17,50 +20,39 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
end
before_all do
# First catalog resource: 3 components and 3 usages per component on usage_end_date
version = create(:ci_catalog_resource_version)
create_list(:ci_catalog_resource_component, 3, version: version).each do |component|
(1..3).each do |k|
create(
:ci_catalog_resource_component_usage,
component: component,
used_date: usage_end_date,
used_by_project_id: k
)
end
end
# Set up each resource with 1-5 versions, 1-5 components per version, and the expected usages per component
expected_usage_counts.each_with_index do |(resource, usage_count), i|
create_list(:ci_catalog_resource_version, i + 1, catalog_resource: resource).each do |version|
(1..i + 1).each do |j|
component = create(:ci_catalog_resource_component, version: version, name: "component#{j}")
# Create 4 more catalog resources, each with 1-4 components and 0-6 usages
# per component on different dates before and after usage_end_date
create_list(:ci_catalog_resource_version, 4).each_with_index do |version, i|
create_list(:ci_catalog_resource_component, i + 1, version: version).each_with_index do |component, j|
next unless j > 0
(1..j * 2).each do |k|
create(
:ci_catalog_resource_component_usage,
component: component,
used_date: usage_end_date - 3.days + k.days,
used_by_project_id: k
)
(1..usage_count).each do |k|
# Inside the usage window
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date, used_by_project_id: k)
# Outside the usage window
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date - k.days, used_by_project_id: k)
end
end
end
end
end
describe '#each_batch' do
shared_examples 'when the runtime limit is not reached' do
shared_examples 'when the aggregator is not interrupted' do
it 'returns the expected result' do
# We process all catalog resources and advance the cursor
batched_usage_counts, result = run_new_aggregator_each_batch
expect(batched_usage_counts).to eq(expected_batched_usage_counts)
expect(result.total_targets_completed).to eq(target_scope.count)
expect(result.cursor.attributes).to eq({
expect(result.total_targets_completed).to eq(target_model.count)
expect(result.cursor_attributes).to eq({
target_id: 0,
usage_window: usage_window,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0
last_usage_count: 0,
max_target_id: target_model.maximum(:id).to_i
})
end
end
@ -70,9 +62,9 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
stub_const("#{described_class}::DISTINCT_USAGE_BATCH_SIZE", 2)
end
it_behaves_like 'when the runtime limit is not reached'
it_behaves_like 'when the aggregator is not interrupted'
context 'when the runtime limit is reached' do
context 'when the aggregator is interrupted' do
before do
# Sets the aggregator to break after the first iteration on each run
stub_const("#{described_class}::MAX_RUNTIME", 0)
@ -84,30 +76,32 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
expect(batched_usage_counts).to eq([])
expect(result.total_targets_completed).to eq(0)
expect(result.cursor.attributes).to eq({
target_id: target_scope.first.id,
usage_window: usage_window,
expect(result.cursor_attributes).to eq({
target_id: target_model.first.id,
usage_window: usage_window.to_h,
last_used_by_project_id: 2,
last_usage_count: 2
last_usage_count: 2,
max_target_id: target_model.maximum(:id).to_i
})
# On 2nd run, we get the complete usage count for the first catalog resource and advance the cursor
batched_usage_counts, result = run_new_aggregator_each_batch
expect(batched_usage_counts).to eq([{ target_scope.first => 3 }])
expect(batched_usage_counts).to eq([{ target_model.first => 3 }])
expect(result.total_targets_completed).to eq(1)
expect(result.cursor.attributes).to eq({
target_id: target_scope.first.id + 1,
usage_window: usage_window,
expect(result.cursor_attributes).to eq({
target_id: target_model.first.id + 1,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0
last_usage_count: 0,
max_target_id: target_model.maximum(:id).to_i
})
all_batched_usage_counts = batched_usage_counts + repeat_new_aggregator_each_batch_until_done
batched_usage_counts_merged = all_batched_usage_counts.flatten.reduce(&:merge)
expect(batched_usage_counts_merged.length).to eq(5)
expect(batched_usage_counts_merged).to eq(expected_batched_usage_counts_merged)
expect(batched_usage_counts_merged).to eq(expected_usage_counts)
end
context 'when a target is deleted between runs' do
@ -117,20 +111,21 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
expect(batched_usage_counts).to eq([])
expect(result.total_targets_completed).to eq(0)
expect(result.cursor.attributes).to eq({
target_id: target_scope.first.id,
usage_window: usage_window,
expect(result.cursor_attributes).to eq({
target_id: target_model.first.id,
usage_window: usage_window.to_h,
last_used_by_project_id: 2,
last_usage_count: 2
last_usage_count: 2,
max_target_id: target_model.maximum(:id).to_i
})
target_scope.first.delete
target_model.first.delete
all_batched_usage_counts = repeat_new_aggregator_each_batch_until_done
batched_usage_counts_merged = all_batched_usage_counts.reduce(&:merge)
expect(batched_usage_counts_merged.length).to eq(4)
expect(batched_usage_counts_merged).to eq(expected_batched_usage_counts_merged)
expect(batched_usage_counts_merged).to eq(expected_usage_counts.except(resources.first))
end
end
@ -142,13 +137,13 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
batched_usage_counts_merged = all_batched_usage_counts.reduce(&:merge)
expect(batched_usage_counts_merged.length).to eq(5)
expect(batched_usage_counts_merged).to eq(expected_batched_usage_counts_merged)
expect(batched_usage_counts_merged).to eq(expected_usage_counts.transform_values { 0 })
end
end
end
end
it_behaves_like 'when the runtime limit is not reached'
it_behaves_like 'when the aggregator is not interrupted'
it_behaves_like 'with multiple distinct usage batches'
context 'with multiple target batches' do
@ -156,7 +151,7 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
stub_const("#{described_class}::TARGET_BATCH_SIZE", 3)
end
it_behaves_like 'when the runtime limit is not reached'
it_behaves_like 'when the aggregator is not interrupted'
it_behaves_like 'with multiple distinct usage batches'
end
@ -173,7 +168,7 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
def run_new_aggregator_each_batch
aggregator = described_class.new(
target_scope: target_scope,
target_model: target_model,
group_by_column: group_by_column,
usage_start_date: usage_start_date,
usage_end_date: usage_end_date,
@ -190,25 +185,9 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
end
def expected_batched_usage_counts
batched_usage_counts = []
target_scope.each_batch(of: described_class::TARGET_BATCH_SIZE) do |targets|
usage_counts = usage_model
.includes(:catalog_resource)
.select('catalog_resource_id, COUNT(DISTINCT used_by_project_id) AS usage_count')
.where(used_date: usage_start_date..usage_end_date)
.where(group_by_column => targets)
.group(:catalog_resource_id)
.each_with_object({}) { |r, hash| hash[r.catalog_resource] = r.usage_count }
batched_usage_counts << targets.index_with { 0 }.merge(usage_counts)
resources.each_slice(described_class::TARGET_BATCH_SIZE).map do |batch|
expected_usage_counts.slice(*batch)
end
batched_usage_counts
end
def expected_batched_usage_counts_merged
expected_batched_usage_counts.reduce(&:merge)
end
def repeat_new_aggregator_each_batch_until_done
@ -217,7 +196,7 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregator, :clean_gitlab_redis_s
30.times do
batched_usage_counts, result = run_new_aggregator_each_batch
all_batched_usage_counts << batched_usage_counts
break if result.cursor.target_id == 0
break if result.cursor_attributes[:target_id] == 0
end
all_batched_usage_counts.flatten

View File

@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitlab_redis_shared_state,
feature_category: :pipeline_composition do
let(:redis_key) { 'my_redis_key:cursor' }
let(:target_scope) { class_double(Ci::Catalog::Resource, maximum: max_target_id) }
let(:target_model) { class_double(Ci::Catalog::Resource, maximum: max_target_id) }
let(:max_target_id) { initial_redis_attributes[:target_id] }
let(:usage_window) { described_class::Window.new(Date.parse('2024-01-08'), Date.parse('2024-01-14')) }
@ -14,13 +14,13 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
let(:initial_redis_attributes) do
{
target_id: 1,
usage_window: initial_redis_usage_window,
usage_window: initial_redis_usage_window.to_h,
last_used_by_project_id: 100,
last_usage_count: 10
}
end
subject(:cursor) { described_class.new(redis_key: redis_key, target_scope: target_scope, usage_window: usage_window) }
subject(:cursor) { described_class.new(redis_key: redis_key, target_model: target_model, usage_window: usage_window) }
before do
Gitlab::Redis::SharedState.with do |redis|
@ -30,7 +30,7 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
describe '.new' do
it 'fetches and parses the attributes from Redis' do
expect(cursor.attributes).to eq(initial_redis_attributes)
expect(cursor.attributes).to include(initial_redis_attributes)
end
context 'when Redis usage_window is different than the given usage_window' do
@ -39,9 +39,9 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
end
it 'resets last usage attributes' do
expect(cursor.attributes).to eq({
expect(cursor.attributes).to include({
target_id: initial_redis_attributes[:target_id],
usage_window: usage_window,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0
})
@ -56,9 +56,9 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
end
it 'sets target_id and last usage attributes to zero' do
expect(cursor.attributes).to eq({
expect(cursor.attributes).to include({
target_id: 0,
usage_window: usage_window,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0
})
@ -76,9 +76,9 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
)
expect(cursor.interrupted?).to eq(true)
expect(cursor.attributes).to eq({
expect(cursor.attributes).to include({
target_id: initial_redis_attributes[:target_id],
usage_window: usage_window,
usage_window: usage_window.to_h,
last_used_by_project_id: initial_redis_attributes[:last_used_by_project_id] + 1,
last_usage_count: initial_redis_attributes[:last_usage_count] + 1
})
@ -90,9 +90,9 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
it 'sets new target_id and resets last usage attributes' do
cursor.target_id = initial_redis_attributes[:target_id] + 1
expect(cursor.attributes).to eq({
expect(cursor.attributes).to include({
target_id: initial_redis_attributes[:target_id] + 1,
usage_window: usage_window,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0
})
@ -101,7 +101,7 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
context 'when new target_id is the same as cursor target_id' do
it 'does not change cursor attributes' do
expect(cursor.attributes).to eq(initial_redis_attributes)
expect(cursor.attributes).to include(initial_redis_attributes)
end
end
end
@ -115,9 +115,10 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
expect(cursor.attributes).to eq({
target_id: initial_redis_attributes[:target_id] + 1,
usage_window: usage_window,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0
last_usage_count: 0,
max_target_id: max_target_id
})
end
end
@ -128,29 +129,17 @@ RSpec.describe Gitlab::Ci::Components::Usages::Aggregators::Cursor, :clean_gitla
expect(cursor.attributes).to eq({
target_id: 0,
usage_window: usage_window,
usage_window: usage_window.to_h,
last_used_by_project_id: 0,
last_usage_count: 0
last_usage_count: 0,
max_target_id: max_target_id
})
end
end
end
describe '#max_target_id' do
let(:target_scope) { Ci::Catalog::Resource }
before_all do
create(:ci_catalog_resource, id: 123)
create(:ci_catalog_resource, id: 100)
end
it 'returns maximum ID of the target scope' do
expect(cursor.max_target_id).to eq(123)
end
end
describe '#save!' do
it 'saves cursor attributes to Redis as JSON' do
it 'saves cursor attributes except max_target_id to Redis as JSON' do
cursor.target_id = 11
cursor.interrupt!(
last_used_by_project_id: 33,

View File

@ -6,7 +6,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountCiRunnersInstanceT
let(:expected_value) { 1 }
before do
create(:ci_runner, contacted_at: 1.second.ago)
create(:ci_runner, :online)
end
it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe "Search results for settings", :js, feature_category: :global_search, type: :feature do
it_behaves_like 'all sections exist and have correct anchor links'
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillVulnerabilityIssueLinksProjectId, feature_category: :vulnerability_management do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :vulnerability_issue_links,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_main_cell,
job_arguments: [
:project_id,
:vulnerabilities,
:project_id,
:vulnerability_id
]
)
}
end
end
end

View File

@ -882,31 +882,31 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
context 'when there is a runner' do
let(:runner) { create(:ci_runner, :project, projects: [build.project]) }
before do
runner.update!(contacted_at: 1.second.ago)
create(:ci_runner, *runner_traits, :project, projects: [build.project])
end
it { is_expected.to be_truthy }
context 'that is online' do
let(:runner_traits) { [:online] }
it { is_expected.to be_truthy }
end
context 'that is inactive' do
before do
runner.update!(active: false)
end
let(:runner_traits) { [:online, :inactive] }
it { is_expected.to be_falsey }
end
context 'that is not online' do
before do
runner.update!(contacted_at: nil)
end
context 'that is offline' do
let(:runner_traits) { [:offline] }
it { is_expected.to be_falsey }
end
context 'that cannot handle build' do
let(:runner_traits) { [:online] }
before do
expect_any_instance_of(Gitlab::Ci::Matching::RunnerMatcher).to receive(:matches?).with(build.build_matcher).and_return(false)
end

View File

@ -217,9 +217,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
describe '#access_level' do
context 'when creating new runner and access_level is nil' do
let(:runner) do
build(:ci_runner, access_level: nil)
end
let(:runner) { build(:ci_runner, access_level: nil) }
it "object is invalid" do
expect(runner).not_to be_valid
@ -227,9 +225,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
context 'when creating new runner and access_level is defined in enum' do
let(:runner) do
build(:ci_runner, access_level: :not_protected)
end
let(:runner) { build(:ci_runner, access_level: :not_protected) }
it "object is valid" do
expect(runner).to be_valid
@ -474,10 +470,10 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
describe '.recent' do
subject { described_class.recent }
let!(:runner1) { create(:ci_runner, contacted_at: nil, created_at: 6.days.ago) }
let!(:runner2) { create(:ci_runner, contacted_at: nil, created_at: 7.days.ago) }
let!(:runner3) { create(:ci_runner, contacted_at: 1.day.ago, created_at: 6.days.ago) }
let!(:runner4) { create(:ci_runner, contacted_at: 1.day.ago, created_at: 7.days.ago) }
let!(:runner1) { create(:ci_runner, :unregistered, :created_within_stale_deadline) }
let!(:runner2) { create(:ci_runner, :unregistered, :stale) }
let!(:runner3) { create(:ci_runner, :created_within_stale_deadline, :contacted_within_stale_deadline) }
let!(:runner4) { create(:ci_runner, :stale, :contacted_within_stale_deadline) }
it { is_expected.to contain_exactly(runner1, runner3, runner4) }
end
@ -506,17 +502,15 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
describe '.paused' do
before do
expect(described_class).to receive(:active).with(false).and_call_original
end
subject { described_class.paused }
subject(:paused) { described_class.paused }
let!(:runner1) { create(:ci_runner, :instance, active: false) }
let!(:runner2) { create(:ci_runner, :instance) }
it 'returns inactive runners' do
is_expected.to match_array([runner1])
expect(described_class).to receive(:active).with(false).and_call_original
expect(paused).to contain_exactly(runner1)
end
end
@ -560,39 +554,33 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
subject { runner.stale? }
before do
allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).once
end
context 'table tests' do
using RSpec::Parameterized::TableSyntax
let(:stale_deadline) { described_class.stale_deadline }
let(:almost_stale_deadline) { 1.second.after(stale_deadline) }
where(:created_at, :contacted_at, :expected_stale?) do
nil | nil | false
7.days.ago | 7.days.ago | true
7.days.ago | (7.days - 1.hour).ago | false
7.days.ago | nil | true
(7.days - 1.hour).ago | nil | false
nil | nil | false
ref(:stale_deadline) | ref(:stale_deadline) | true
ref(:stale_deadline) | ref(:almost_stale_deadline) | false
ref(:stale_deadline) | nil | true
ref(:almost_stale_deadline) | nil | false
end
with_them do
before do
runner.created_at = created_at
runner.contacted_at = contacted_at
end
context 'no cache value' do
before do
stub_redis_runner_contacted_at(nil)
runner.contacted_at = contacted_at
end
it { is_expected.to eq(expected_stale?) }
end
it { is_expected.to eq(expected_stale?) }
context 'with cache value' do
before do
runner.contacted_at = contacted_at ? contacted_at + 1.week : nil
stub_redis_runner_contacted_at(contacted_at.to_s)
end
@ -613,69 +601,51 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
describe '#online?', :clean_gitlab_redis_cache, :freeze_time do
let(:runner) { build(:ci_runner, :instance) }
subject { runner.online? }
before do
allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
context 'never contacted' do
let(:runner) { build(:ci_runner, :unregistered) }
it { is_expected.to be_falsey }
end
context 'no cache value' do
before do
stub_redis_runner_contacted_at(nil)
end
context 'contacted long time ago' do
let(:runner) { build(:ci_runner, :stale) }
context 'never contacted' do
before do
runner.contacted_at = nil
end
it { is_expected.to be_falsey }
end
it { is_expected.to be_falsey }
end
context 'contacted now' do
let(:runner) { build(:ci_runner, :online) }
context 'contacted long time ago' do
before do
runner.contacted_at = 1.year.ago
end
it { is_expected.to be_falsey }
end
context 'contacted 1s ago' do
before do
runner.contacted_at = 1.second.ago
end
it { is_expected.to be_truthy }
end
it { is_expected.to be_truthy }
end
context 'with cache value' do
let(:runner) { create(:ci_runner, :stale) }
before do
stub_redis_runner_contacted_at(cached_contacted_at.to_s)
end
context 'contacted long time ago' do
before do
runner.contacted_at = 1.year.ago
stub_redis_runner_contacted_at(1.year.ago.to_s)
end
let(:cached_contacted_at) { runner.uncached_contacted_at }
it { is_expected.to be_falsey }
end
context 'contacted 1s ago' do
before do
runner.contacted_at = 50.minutes.ago
stub_redis_runner_contacted_at(1.second.ago.to_s)
end
let(:cached_contacted_at) { 1.second.ago }
it { is_expected.to be_truthy }
end
end
def stub_redis_runner_contacted_at(value)
Gitlab::Redis::Cache.with do |redis|
cache_key = runner.send(:cache_attribute_key)
expect(redis).to receive(:get).with(cache_key)
.and_return({ contacted_at: value }.to_json).at_least(:once)
def stub_redis_runner_contacted_at(value)
Gitlab::Redis::Cache.with do |redis|
cache_key = runner.send(:cache_attribute_key)
expect(redis).to receive(:get).with(cache_key)
.and_return({ contacted_at: value }.to_json).at_least(:once)
end
end
end
end
@ -866,76 +836,65 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
subject { runner.status }
context 'never connected' do
let(:runner) { build(:ci_runner, :instance, :unregistered, created_at: 7.days.ago) }
let(:runner) { build(:ci_runner, :unregistered, :stale) }
it { is_expected.to eq(:stale) }
context 'created recently' do
let(:runner) { build(:ci_runner, :instance, :unregistered, created_at: 1.day.ago) }
let(:runner) { build(:ci_runner, :unregistered, created_at: 1.day.ago) }
it { is_expected.to eq(:never_contacted) }
end
end
context 'inactive but online' do
let(:runner) { build(:ci_runner, :instance, active: false, contacted_at: 1.second.ago) }
let(:runner) { build(:ci_runner, :inactive, :online) }
it { is_expected.to eq(:online) }
end
context 'contacted 1s ago' do
let(:runner) { build(:ci_runner, :instance, contacted_at: 1.second.ago) }
let(:runner) { build(:ci_runner, contacted_at: 1.second.ago) }
it { is_expected.to eq(:online) }
end
context 'contacted recently' do
let(:runner) { build(:ci_runner, :instance, contacted_at: (7.days - 1.second).ago) }
let(:runner) { build(:ci_runner, :contacted_within_stale_deadline) }
it { is_expected.to eq(:offline) }
end
context 'contacted long time ago' do
let(:runner) { build(:ci_runner, :instance, created_at: 7.days.ago, contacted_at: 7.days.ago) }
let(:runner) { build(:ci_runner, :stale) }
it { is_expected.to eq(:stale) }
end
end
describe '#deprecated_rest_status', :freeze_time do
let(:runner) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
subject { runner.deprecated_rest_status }
context 'never connected' do
before do
runner.contacted_at = nil
end
let(:runner) { build(:ci_runner, :unregistered) }
it { is_expected.to eq(:never_contacted) }
end
context 'contacted 1s ago' do
before do
runner.contacted_at = 1.second.ago
end
context 'contacted recently' do
let(:runner) { build(:ci_runner, :online) }
it { is_expected.to eq(:online) }
end
context 'contacted long time ago' do
before do
runner.created_at = 7.days.ago
runner.contacted_at = 7.days.ago
end
let(:runner) { build(:ci_runner, :stale) }
it { is_expected.to eq(:stale) }
end
context 'inactive' do
before do
runner.active = false
end
let(:runner) { build(:ci_runner, :inactive, :online) }
it { is_expected.to eq(:paused) }
end
@ -1008,16 +967,12 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
describe '#heartbeat', :freeze_time do
let(:runner) { create(:ci_runner, :project) }
subject(:heartbeat) do
runner.heartbeat
end
context 'when database was updated recently' do
before do
runner.contacted_at = Time.current
end
let(:runner) { create(:ci_runner, :online) }
it 'updates cache' do
expect_redis_update
@ -1027,14 +982,8 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
context 'when database was not updated recently' do
before do
runner.contacted_at = 2.hours.ago
end
context 'with invalid runner' do
before do
runner.runner_projects.delete_all
end
let(:runner) { create(:ci_runner, :offline, :project, :without_projects) }
it 'still updates contacted at in redis cache and database' do
expect(runner).to be_invalid
@ -1065,7 +1014,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
describe '#clear_heartbeat', :freeze_time do
let!(:runner) { create(:ci_runner, :project) }
let!(:runner) { create(:ci_runner) }
it 'clears contacted at' do
expect do
@ -1238,13 +1187,13 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
describe '#has_tags?' do
context 'when runner has tags' do
subject { create(:ci_runner, tag_list: ['tag']) }
subject { build(:ci_runner, tag_list: ['tag']) }
it { is_expected.to have_tags }
end
context 'when runner does not have tags' do
subject { create(:ci_runner, tag_list: []) }
subject { build(:ci_runner, tag_list: []) }
it { is_expected.not_to have_tags }
end
@ -1908,7 +1857,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
describe '#ensure_token' do
let(:runner) { described_class.new(registration_type: registration_type) }
let(:runner) { build(:ci_runner, registration_type: registration_type) }
let(:token) { 'an_existing_secret_token' }
let(:static_prefix) { described_class::CREATED_RUNNER_TOKEN_PREFIX }
@ -1980,8 +1929,8 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
describe 'status scopes' do
let_it_be(:online_runner) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
let_it_be(:offline_runner) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) }
let_it_be(:online_runner) { create(:ci_runner, :instance, :online) }
let_it_be(:offline_runner) { create(:ci_runner, :instance, :offline) }
let_it_be(:never_contacted_runner) { create(:ci_runner, :instance, :unregistered) }
describe '.online' do
@ -2011,13 +1960,8 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
describe '.stale', :freeze_time do
subject { described_class.stale }
let!(:stale_runner1) do
create(:ci_runner, :instance, created_at: described_class.stale_deadline - 1.second, contacted_at: nil)
end
let!(:stale_runner2) do
create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: described_class.stale_deadline - 1.second)
end
let!(:stale_runner1) { create(:ci_runner, :unregistered, :stale) }
let!(:stale_runner2) { create(:ci_runner, :stale) }
it 'returns stale runners' do
is_expected.to contain_exactly(stale_runner1, stale_runner2)
@ -2050,7 +1994,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
let_it_be(:instance_runner) { create(:ci_runner, :instance) }
let_it_be(:group_runner) { create(:ci_runner, :group) }
let_it_be(:project_runner) { create(:ci_runner, :project) }
let_it_be(:project_runner) { create(:ci_runner, :project, :without_projects) }
context 'with instance_type' do
let(:runner_type) { 'instance_type' }

View File

@ -19,6 +19,7 @@ RSpec.describe Enums::Sbom, feature_category: :dependency_management do
:npm | 6
:nuget | 7
:pypi | 8
:cargo | 14
:apk | 9
:rpm | 10
:deb | 11
@ -37,8 +38,8 @@ RSpec.describe Enums::Sbom, feature_category: :dependency_management do
end
it 'contains all of the dependency scanning and container scanning purl types' do
expect(described_class::DEPENDENCY_SCANNING_PURL_TYPES + described_class::CONTAINER_SCANNING_PURL_TYPES)
.to eql(described_class::PURL_TYPES.keys)
expect((described_class::DEPENDENCY_SCANNING_PURL_TYPES + described_class::CONTAINER_SCANNING_PURL_TYPES).sort)
.to eql(described_class::PURL_TYPES.keys.sort)
end
end
@ -53,6 +54,7 @@ RSpec.describe Enums::Sbom, feature_category: :dependency_management do
'npm' | true
'nuget' | true
'pypi' | true
'cargo' | true
'unknown' | false
'apk' | false
'rpm' | false
@ -78,6 +80,7 @@ RSpec.describe Enums::Sbom, feature_category: :dependency_management do
'npm' | false
'nuget' | false
'pypi' | false
'cargo' | false
'unknown' | false
:apk | false
'apk' | true

View File

@ -2212,6 +2212,12 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
let_it_be(:project2) { create(:project, star_count: 1) }
let_it_be(:project3) { create(:project, last_activity_at: 2.minutes.ago) }
before_all do
create(:project_statistics, project: project1, repository_size: 1)
create(:project_statistics, project: project2, repository_size: 3)
create(:project_statistics, project: project3, repository_size: 2)
end
it 'reorders the input relation by star count desc' do
projects = described_class.sort_by_attribute(:stars_desc)
@ -2241,6 +2247,18 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect(projects).to eq([project1, project2, project3].sort_by(&:path).reverse)
end
it 'reorders the input relation by storage size asc' do
projects = described_class.sort_by_attribute(:storage_size_asc)
expect(projects).to eq([project1, project3, project2])
end
it 'reorders the input relation by storage size desc' do
projects = described_class.sort_by_attribute(:storage_size_desc)
expect(projects).to eq([project2, project3, project1])
end
end
describe '.order_by_storage_size' do
@ -2249,11 +2267,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
let_it_be(:project_3) { create(:project_statistics, repository_size: 2).project }
context 'ascending' do
it { expect(described_class.order_by_storage_size(:asc)).to eq([project_1, project_3, project_2]) }
it { expect(described_class.sorted_by_storage_size_asc).to eq([project_1, project_3, project_2]) }
end
context 'descending' do
it { expect(described_class.order_by_storage_size(:desc)).to eq([project_2, project_3, project_1]) }
it { expect(described_class.sorted_by_storage_size_desc).to eq([project_2, project_3, project_1]) }
end
end

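The project sorting examples now go through named scopes instead of `order_by_storage_size(:asc/:desc)`. A possible shape for those scopes, assuming they order on `project_statistics.storage_size` through the `statistics` association (a sketch, not the actual model code):

class Project < ApplicationRecord
  has_one :statistics, class_name: 'ProjectStatistics'

  # Assumed implementation of the scopes exercised in the spec above.
  scope :sorted_by_storage_size_asc, -> {
    joins(:statistics).order(Arel.sql('project_statistics.storage_size ASC'))
  }

  scope :sorted_by_storage_size_desc, -> {
    joins(:statistics).order(Arel.sql('project_statistics.storage_size DESC'))
  }
end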
View File

@ -689,7 +689,7 @@ RSpec.describe 'Query.runner(id)', :freeze_time, feature_category: :fleet_visibi
end
let_it_be(:never_contacted_instance_runner) do
create(:ci_runner, :unregistered, description: 'Missing runner 1', created_at: 6.days.ago)
create(:ci_runner, :unregistered, :created_within_stale_deadline, description: 'Missing runner 1')
end
let(:query) do

View File

@ -8,10 +8,6 @@ RSpec.describe SearchController, type: :request, feature_category: :global_searc
let_it_be(:project) { create(:project, :public, :repository, :wiki_repo, name: 'awesome project', group: group) }
let_it_be(:projects) { create_list(:project, 5, :public, :repository, :wiki_repo) }
before do
login_as(user)
end
def send_search_request(params)
get search_path, params: params
end
@ -37,6 +33,10 @@ RSpec.describe SearchController, type: :request, feature_category: :global_searc
describe 'GET /search' do
let(:creation_traits) { [] }
before do
login_as(user)
end
context 'for issues scope' do
let(:object) { :issue }
let(:labels) { create_list(:label, 3, project: project) }
@ -199,4 +199,61 @@ RSpec.describe SearchController, type: :request, feature_category: :global_searc
end
end
end
describe 'GET /search/settings' do
subject(:request) { get search_settings_path, params: params }
let(:params) { nil }
context 'when user is not signed-in' do
it { is_expected.to redirect_to(new_user_session_path) }
end
context 'when user is signed-in' do
before do
login_as(user)
end
context 'when project_id param is missing' do
it 'raises an error' do
expect { request }.to raise_error(ActionController::ParameterMissing)
end
end
context 'when given project is not found' do
let(:params) { { project_id: non_existing_record_id } }
it 'returns an empty array' do
request
expect(response.body).to eq '[]'
end
end
context 'when user is not allowed to change settings in given project' do
let(:params) { { project_id: project.id } }
it 'returns an empty array' do
request
expect(response.body).to eq '[]'
end
end
context 'when user is allowed to change settings in given project' do
before_all do
project.add_maintainer(user)
end
let(:params) { { project_id: project.id } }
it 'returns all available settings results' do
expect_next_instance_of(Search::Settings) do |settings|
expect(settings).to receive(:for_project).with(project).and_return(%w[foo bar])
end
request
expect(response.body).to eq '["foo","bar"]'
end
end
end
end
end

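The new `GET /search/settings` examples describe an endpoint that requires sign-in, requires a `project_id` param, and returns a JSON array from `Search::Settings#for_project` only when the user may administer the project. A controller action consistent with that behaviour might look like the sketch below; the finder and permission check are assumptions, and authentication is assumed to be handled by a `before_action`.

# Hypothetical action; the real SearchController may differ.
def settings
  project = Project.find_by_id(params.require(:project_id))

  if project && can?(current_user, :admin_project, project)
    render json: Search::Settings.new.for_project(project)
  else
    render json: []
  end
end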
View File

@ -0,0 +1,183 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::Catalog::Resources::AggregateLast30DayUsageService, :clean_gitlab_redis_shared_state, :freeze_time,
feature_category: :pipeline_composition do
let_it_be(:usage_start_date) { Date.today - described_class::WINDOW_LENGTH }
let_it_be(:usage_end_date) { Date.today - 1.day }
let_it_be(:initial_usage_count_updated_at) { usage_end_date.to_time }
let_it_be(:resources) { create_list(:ci_catalog_resource, 4).sort_by(&:id) }
let_it_be(:expected_ordered_usage_counts) { [3, 1, 0, 15] }
let(:expected_cursor_attributes) do
{
target_id: 0,
usage_window: usage_window_hash,
last_used_by_project_id: 0,
last_usage_count: 0,
max_target_id: Ci::Catalog::Resource.maximum(:id).to_i
}
end
let(:usage_window_hash) { { start_date: usage_start_date, end_date: usage_end_date } }
let(:lease_key) { described_class.name }
let(:service) { described_class.new }
before_all do
# Set up each resource with 1-4 versions, 1-4 components per version, and the expected usages per component
expected_ordered_usage_counts.each_with_index do |usage_count, i|
resource = resources[i]
create_list(:ci_catalog_resource_version, i + 1, catalog_resource: resource).each do |version|
(1..i + 1).each do |j|
component = create(:ci_catalog_resource_component, version: version, name: "component#{j}")
(1..usage_count).each do |k|
# Inside the usage window
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date, used_by_project_id: k)
# Outside the usage window
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date - k.days, used_by_project_id: k)
end
end
end
end
Ci::Catalog::Resource.update_all(last_30_day_usage_count_updated_at: initial_usage_count_updated_at)
end
describe '#execute' do
context 'when the aggregator is not interrupted' do
shared_examples 'aggregates usage data for all catalog resources' do
it 'returns a success response' do
response = service.execute
expect(response).to be_success
expect(response.payload).to eq({
total_targets_completed: 4,
cursor_attributes: expected_cursor_attributes
})
end
end
it_behaves_like 'aggregates usage data for all catalog resources'
it 'calls BulkUpdate once and updates usage counts for all catalog resources' do
expect(Gitlab::Database::BulkUpdate).to receive(:execute).once.and_call_original
service.execute
expect(ordered_usage_counts).to eq(expected_ordered_usage_counts)
expect(ordered_usage_counts_updated_at).to match_array([Time.current] * 4)
end
context 'when there are two batches of usage counts' do
before do
stub_const('Gitlab::Ci::Components::Usages::Aggregator::TARGET_BATCH_SIZE', 2)
end
it_behaves_like 'aggregates usage data for all catalog resources'
it 'calls BulkUpdate twice and updates usage counts for all catalog resources' do
expect(Gitlab::Database::BulkUpdate).to receive(:execute).twice.and_call_original
service.execute
expect(ordered_usage_counts).to eq(expected_ordered_usage_counts)
expect(ordered_usage_counts_updated_at).to match_array([Time.current] * 4)
end
end
context 'when some catalog resources have already been processed today' do
before_all do
resources.first(2).each do |resource|
resource.update!(last_30_day_usage_count_updated_at: Date.today.to_time)
end
end
# The cursor has not advanced so it still processes all targets
it_behaves_like 'aggregates usage data for all catalog resources'
it 'calls BulkUpdate once and updates usage counts for all catalog resources' do
expect(Gitlab::Database::BulkUpdate).to receive(:execute).once.and_call_original
service.execute
expect(ordered_usage_counts).to eq(expected_ordered_usage_counts)
expect(ordered_usage_counts_updated_at).to match_array([Time.current] * 4)
end
end
context 'when all catalog resources have already been processed today' do
before_all do
Ci::Catalog::Resource.update_all(last_30_day_usage_count_updated_at: Date.today.to_time)
end
it 'does not aggregate usage data' do
expect(Gitlab::Ci::Components::Usages::Aggregator).not_to receive(:new)
response = service.execute
expect(response).to be_success
expect(response.message).to eq("Processing complete for #{Date.today}")
expect(response.payload).to eq({})
end
end
end
context 'when the aggregator is interrupted' do
before do
# Sets the aggregator to break after the first iteration on each run
stub_const('Gitlab::Ci::Components::Usages::Aggregator::MAX_RUNTIME', 0)
stub_const('Gitlab::Ci::Components::Usages::Aggregator::DISTINCT_USAGE_BATCH_SIZE', 2)
end
it 'updates the expected usage counts for each run' do
# On 1st run, we get an incomplete usage count for the first catalog resource so it is not saved
expect { service.execute }
.to not_change { ordered_usage_counts }
.and not_change { ordered_usage_counts_updated_at }
# On 2nd run, we get the complete usage count for the first catalog resource and save it
service.execute
expect(ordered_usage_counts).to eq([expected_ordered_usage_counts.first, 0, 0, 0])
expect(ordered_usage_counts_updated_at).to eq([Time.current, [initial_usage_count_updated_at] * 3].flatten)
# Execute service repeatedly until done
30.times do
response = service.execute
break if response.payload[:cursor_attributes][:target_id] == 0
end
expect(ordered_usage_counts).to eq(expected_ordered_usage_counts)
expect(ordered_usage_counts_updated_at).to match_array([Time.current] * 4)
end
end
context 'when another instance is running with the same lease key' do
it 'returns a success response with the lease key' do
lease = Gitlab::ExclusiveLease.new(lease_key, timeout: 1.minute).tap(&:try_obtain)
response = service.execute
expect(response).to be_success
expect(response.message).to eq('Lease taken')
expect(response.payload).to eq({ lease_key: lease_key })
lease.cancel
end
end
end
private
def ordered_usage_counts
Ci::Catalog::Resource.order(:id).pluck(:last_30_day_usage_count)
end
def ordered_usage_counts_updated_at
Ci::Catalog::Resource.order(:id).pluck(:last_30_day_usage_count_updated_at)
end
end

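The 'another instance is running' context above relies on an exclusive lease keyed on the service class name. A minimal sketch of that guard, using the same `Gitlab::ExclusiveLease` API the spec obtains directly; the real service may use `Gitlab::ExclusiveLeaseHelpers#in_lock` or a different timeout.

def execute
  lease = Gitlab::ExclusiveLease.new(self.class.name, timeout: 1.minute)

  unless lease.try_obtain
    return ServiceResponse.success(message: 'Lease taken', payload: { lease_key: self.class.name })
  end

  begin
    aggregate_and_save_usage_counts # hypothetical helper doing the actual aggregation
  ensure
    lease.cancel
  end
end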
View File

@ -161,7 +161,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_categor
context 'when it exceeds the application limits' do
before do
create(:ci_runner, runner_type: :project_type, projects: [project], contacted_at: 1.second.ago)
create(:ci_runner, :project, projects: [project], contacted_at: 1.second.ago)
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
end
@ -179,7 +179,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_categor
context 'when abandoned runners cause application limits to not be exceeded' do
before do
create(:ci_runner, runner_type: :project_type, projects: [project], created_at: 14.months.ago, contacted_at: 13.months.ago)
create(:ci_runner, :project, :stale, projects: [project])
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
end
@ -246,7 +246,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_categor
context 'when it exceeds the application limits' do
before do
create(:ci_runner, :unregistered, runner_type: :group_type, groups: [group], created_at: 6.days.ago)
create(:ci_runner, :unregistered, :created_within_stale_deadline, :group, groups: [group])
create(:plan_limits, :default_plan, ci_registered_group_runners: 1)
end
@ -264,8 +264,8 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_categor
context 'when abandoned runners cause application limits to not be exceeded' do
before do
create(:ci_runner, runner_type: :group_type, groups: [group], created_at: 4.months.ago, contacted_at: 3.months.ago)
create(:ci_runner, :unregistered, runner_type: :group_type, groups: [group], created_at: 4.months.ago)
create(:ci_runner, :group, :stale, groups: [group])
create(:ci_runner, :unregistered, :group, groups: [group], created_at: 4.months.ago)
create(:plan_limits, :default_plan, ci_registered_group_runners: 1)
end

View File

@ -21,7 +21,7 @@ RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', :freez
context 'with runner created in UI' do
let!(:runner_manager1) { create(:ci_runner_machine, runner: runner, system_xid: 'system_id_1') }
let!(:runner_manager2) { create(:ci_runner_machine, runner: runner, system_xid: 'system_id_2') }
let!(:runner) { create(:ci_runner, registration_type: :authenticated_user, contacted_at: Time.current) }
let!(:runner) { create(:ci_runner, :online, registration_type: :authenticated_user) }
context 'with system_id specified' do
let(:system_id) { runner_manager1.system_xid }

View File

@ -9,11 +9,13 @@ RSpec.shared_context 'runners resolver setup' do
let_it_be(:project) { create(:project, :public, group: group) }
let_it_be(:inactive_project_runner) do
create(:ci_runner, :project, projects: [project], description: 'inactive project runner', token: 'abcdef', active: false, contacted_at: 1.minute.ago, tag_list: %w[project_runner])
create(:ci_runner, :project, :inactive, :online, projects: [project],
description: 'inactive project runner', token: 'abcdef', tag_list: %w[project_runner])
end
let_it_be(:offline_project_runner) do
create(:ci_runner, :project, projects: [project], description: 'offline project runner', token: 'defghi', contacted_at: 1.day.ago, tag_list: %w[project_runner active_runner])
create(:ci_runner, :project, :offline, projects: [project],
description: 'offline project runner', token: 'defghi', tag_list: %w[project_runner active_runner])
end
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], token: 'mnopqr', description: 'group runner', contacted_at: 2.seconds.ago) }

View File

@ -17,6 +17,7 @@ RSpec.shared_examples 'shared super sidebar context' do
issues_path: issues_dashboard_path,
mr_path: merge_requests_dashboard_path,
autocomplete_path: search_autocomplete_path,
settings_path: search_settings_path,
search_context: helper.header_search_context
},
panel_type: panel_type

View File

@ -15,7 +15,8 @@ RSpec.shared_examples 'purl_types enum' do
rpm: 10,
deb: 11,
'cbl-mariner': 12,
wolfi: 13
wolfi: 13,
cargo: 14
}
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
RSpec.shared_examples 'all sections exist and have correct anchor links' do
let(:settings) { Search::Settings.new.for_project(project) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
it 'has only valid settings sections' do
sign_in(user)
project.add_maintainer(user)
current_href = nil
settings.each do |setting|
# This speeds up the spec by not repeatedly visiting the same page.
if current_href != remove_anchor_from_url(setting[:href])
visit setting[:href]
current_href = remove_anchor_from_url(setting[:href])
end
expect(page).to have_content setting[:text]
expect(page).to have_css "##{URI.parse(setting[:href]).fragment}"
end
end
def remove_anchor_from_url(url)
uri = URI.parse(url)
uri.fragment = nil
uri.to_s
end
end

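A usage sketch for the shared example above: a feature spec for a settings page only needs to include it, since `project`, `user` and `settings` are provided by the shared example itself (the describe string and metadata here are made up).

RSpec.describe 'Project settings search', :js, feature_category: :global_search do
  it_behaves_like 'all sections exist and have correct anchor links'
end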
View File

@ -0,0 +1,71 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::Catalog::Resources::AggregateLast30DayUsageWorker, feature_category: :pipeline_composition do
subject(:worker) { described_class.new }
include_examples 'an idempotent worker'
it 'has the `until_executed` deduplicate strategy' do
expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
end
it 'has the option to reschedule once if deduplicated and a TTL' do
expect(described_class.get_deduplication_options).to include(
{ if_deduplicated: :reschedule_once, ttl: Gitlab::Ci::Components::Usages::Aggregator::WORKER_DEDUP_TTL })
end
describe '#perform', :clean_gitlab_redis_shared_state, :freeze_time do
let_it_be(:usage_start_date) { Date.today - Ci::Catalog::Resources::AggregateLast30DayUsageService::WINDOW_LENGTH }
let_it_be(:usage_end_date) { Date.today - 1.day }
let_it_be(:resources) { create_list(:ci_catalog_resource, 3).sort_by(&:id) }
let_it_be(:expected_ordered_usage_counts) { [7, 12, 0] }
let(:usage_window_hash) { { start_date: usage_start_date, end_date: usage_end_date } }
subject(:perform) { worker.perform }
before_all do
# Set up each resource with 1 version and 1 component, and the expected usages per component
expected_ordered_usage_counts.each_with_index do |usage_count, i|
resource = resources[i]
version = create(:ci_catalog_resource_version, catalog_resource: resource)
component = create(:ci_catalog_resource_component, version: version)
(1..usage_count).each do |k|
create(:ci_catalog_resource_component_usage,
component: component, used_date: usage_start_date, used_by_project_id: k)
end
end
end
it 'aggregates and updates usage counts for all catalog resources' do
perform
ordered_usage_counts = Ci::Catalog::Resource.order(:id).pluck(:last_30_day_usage_count)
ordered_usage_counts_updated_at = Ci::Catalog::Resource.order(:id).pluck(:last_30_day_usage_count_updated_at)
expect(ordered_usage_counts).to eq(expected_ordered_usage_counts)
expect(ordered_usage_counts_updated_at).to match_array([Time.current] * 3)
end
it 'logs the service response' do
expect(worker).to receive(:log_hash_metadata_on_done)
.with(
status: :success,
message: 'Targets processed',
total_targets_completed: 3,
cursor_attributes: {
target_id: 0,
usage_window: usage_window_hash,
last_used_by_project_id: 0,
last_usage_count: 0,
max_target_id: Ci::Catalog::Resource.maximum(:id).to_i
})
perform
end
end
end
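For reference, a `#perform` consistent with the logging expectation in this worker spec could simply delegate to the service and splat its payload into the structured log call (a sketch; the real worker may add guard clauses or extra metadata).

def perform
  response = Ci::Catalog::Resources::AggregateLast30DayUsageService.new.execute

  log_hash_metadata_on_done(
    status: response.status,
    message: response.message,
    **response.payload
  )
end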