Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-01-25 15:09:25 +00:00
parent 7b1fa4c1a1
commit ec558ad8ed
74 changed files with 739 additions and 434 deletions

View File

@ -338,7 +338,7 @@ gem 'sentry-sidekiq', '~> 5.1.1'
# PostgreSQL query parsing
#
gem 'pg_query', '~> 2.2'
gem 'pg_query', '~> 2.2', '>= 2.2.1'
gem 'premailer-rails', '~> 1.10.3'

View File

@ -422,7 +422,7 @@
{"name":"pg","version":"1.4.5","platform":"x64-mingw-ucrt","checksum":"614814a4597fed5c4a85e107a96ef6c8ee01b3e7dbc6529912249b7d778e5651"},
{"name":"pg","version":"1.4.5","platform":"x64-mingw32","checksum":"d9a15cb4ee478bf719fee6ecd6c8b41d5569515ee0d968e561fe120aed862cb1"},
{"name":"pg","version":"1.4.5","platform":"x86-mingw32","checksum":"255764ff8ac89203cc9dcc7188a4205e760fa7b884d75c94007b79897ee8613d"},
{"name":"pg_query","version":"2.2.0","platform":"ruby","checksum":"84a37548412f540061bcc52ee2915352297832816bca60e4524c716e03f1e950"},
{"name":"pg_query","version":"2.2.1","platform":"ruby","checksum":"6086972bbf4eab86d8425b35f14ca8b6fe41e4341423582801c1ec86ff5f8cea"},
{"name":"plist","version":"3.6.0","platform":"ruby","checksum":"f468bcf6b72ec6d1585ed6744eb4817c1932a5bf91895ed056e69b7f12ca10f2"},
{"name":"png_quantizator","version":"0.2.1","platform":"ruby","checksum":"6023d4d064125c3a7e02929c95b7320ed6ac0d7341f9e8de0c9ea6576ef3106b"},
{"name":"po_to_json","version":"1.0.1","platform":"ruby","checksum":"6a7188aa6c42a22c9718f9b39062862ef7f3d8f6a7b4177cae058c3308b56af7"},

View File

@ -1076,7 +1076,7 @@ GEM
peek (1.1.0)
railties (>= 4.0.0)
pg (1.4.5)
pg_query (2.2.0)
pg_query (2.2.1)
google-protobuf (>= 3.19.2)
plist (3.6.0)
png_quantizator (0.2.1)
@ -1777,7 +1777,7 @@ DEPENDENCIES
parslet (~> 1.8)
peek (~> 1.1)
pg (~> 1.4.5)
pg_query (~> 2.2)
pg_query (~> 2.2, >= 2.2.1)
png_quantizator (~> 0.2.1)
premailer-rails (~> 1.10.3)
prometheus-client-mmap (~> 0.17)

View File

@ -15,7 +15,7 @@ export default {
},
computed: {
isOauthSelfManagedEnabled() {
return this.glFeatures.jiraConnectOauth && this.glFeatures.jiraConnectOauthSelfManaged;
return this.glFeatures.jiraConnectOauth;
},
},
};

View File

@ -334,7 +334,7 @@ export const timeToHoursMinutes = (time = '') => {
* @param {String} offset An optional Date-compatible offset.
* @returns {String} The combined Date's ISO string representation.
*/
export const dateAndTimeToISOString = (date, time, offset = '') => {
export const dateAndTimeToISOString = (date, time = '00:00', offset = '') => {
const { year, month, day } = dateToYearMonthDate(date);
const { hours, minutes } = timeToHoursMinutes(time);
const dateString = `${year}-${month}-${day}T${hours}:${minutes}:00.000${offset || 'Z'}`;

View File

@ -141,11 +141,7 @@ export default {
:value="targetProjectId"
/>
<projects-dropdown
class="gl-w-half"
:value="targetProjectName"
@selectProject="setSelectedProject"
/>
<projects-dropdown :value="targetProjectName" @selectProject="setSelectedProject" />
</gl-form-group>
<gl-form-group

View File

@ -1,5 +1,5 @@
<script>
import { GlDropdown, GlSearchBoxByType, GlDropdownItem, GlDropdownText } from '@gitlab/ui';
import { GlCollapsibleListbox } from '@gitlab/ui';
import { mapGetters, mapState } from 'vuex';
import {
I18N_NO_RESULTS_MESSAGE,
@ -10,10 +10,7 @@ import {
export default {
name: 'ProjectsDropdown',
components: {
GlDropdown,
GlSearchBoxByType,
GlDropdownItem,
GlDropdownText,
GlCollapsibleListbox,
},
props: {
value: {
@ -41,17 +38,20 @@ export default {
project.name.toLowerCase().includes(lowerCasedFilterTerm),
);
},
listboxItems() {
return this.filteredResults.map(({ id, name }) => ({ value: id, text: name }));
},
selectedProject() {
return this.sortedProjects.find((project) => project.id === this.targetProjectId) || {};
},
},
methods: {
selectProject(project) {
this.$emit('selectProject', project.id);
this.filterTerm = project.name; // when we select a project, we want the dropdown to filter to the selected project
},
isSelected(selectedProject) {
return selectedProject === this.selectedProject;
selectProject(value) {
this.$emit('selectProject', value);
// when we select a project, we want the dropdown to filter to the selected project
const project = this.listboxItems.find((x) => x.value === value);
this.filterTerm = project?.text || '';
},
filterTermChanged(value) {
this.filterTerm = value;
@ -60,28 +60,15 @@ export default {
};
</script>
<template>
<gl-dropdown :text="selectedProject.name" :header-text="$options.i18n.projectHeaderTitle">
<gl-search-box-by-type
:value="filterTerm"
trim
autocomplete="off"
:placeholder="$options.i18n.projectSearchPlaceholder"
data-testid="dropdown-search-box"
@input="filterTermChanged"
/>
<gl-dropdown-item
v-for="project in filteredResults"
:key="project.name"
:name="project.name"
:is-checked="isSelected(project)"
is-check-item
data-testid="dropdown-item"
@click="selectProject(project)"
>
{{ project.name }}
</gl-dropdown-item>
<gl-dropdown-text v-if="!filteredResults.length" data-testid="empty-result-message">
<span class="gl-text-gray-500">{{ $options.i18n.noResultsMessage }}</span>
</gl-dropdown-text>
</gl-dropdown>
<gl-collapsible-listbox
:header-text="$options.i18n.projectHeaderTitle"
:items="listboxItems"
searchable
:search-placeholder="$options.i18n.projectSearchPlaceholder"
:selected="selectedProject.id"
:toggle-text="selectedProject.name"
:no-results-text="$options.i18n.noResultsMessage"
@search="filterTermChanged"
@select="selectProject"
/>
</template>

View File

@ -67,12 +67,13 @@ export default {
<gl-link
v-gl-tooltip
class="d-flex align-items-center monospace"
:title="__('Download evidence JSON')"
:download="evidenceTitle(index)"
target="_blank"
:title="__('Open evidence JSON in new tab')"
:href="evidenceUrl(index)"
>
<gl-icon name="review-list" class="align-middle gl-mr-3" />
<span>{{ evidenceTitle(index) }}</span>
<gl-icon name="external-link" class="gl-ml-2 gl-flex-shrink-0 gl-flex-grow-0" />
</gl-link>
<expand-button>

View File

@ -13,7 +13,7 @@ export default {
},
methods: {
checkSlots() {
return this.$scopedSlots.default?.()?.some((c) => c.tag);
return this.$scopedSlots.default?.()?.some((c) => c.elm?.innerText);
},
},
};

View File

@ -62,6 +62,7 @@ export default {
:value="state"
:options="$options.states"
:disabled="disabled"
data-testid="work-item-state-select"
class="gl-w-auto hide-select-decoration gl-pl-3"
:class="{ 'gl-bg-transparent! gl-cursor-text!': disabled }"
@change="setState"

View File

@ -313,6 +313,7 @@ export default {
:view-only="!canUpdate"
:allow-clear-all="isEditing"
class="assignees-selector gl-flex-grow-1 gl-border gl-border-white gl-rounded-base col-9 gl-align-self-start gl-px-0! gl-mx-2"
data-testid="work-item-asssignees-input"
@input="handleAssigneesInput"
@text-input="debouncedSearchKeyUpdate"
@focus="handleFocus"

View File

@ -198,6 +198,7 @@ export default {
:form-field-placeholder="__('Write a comment or drag your files here…')"
form-field-id="work-item-add-comment"
form-field-name="work-item-add-comment"
data-testid="work-item-add-comment"
enable-autocomplete
autofocus
use-bottom-toolbar

View File

@ -215,6 +215,7 @@ export default {
ref="startDatePicker"
v-model="dirtyStartDate"
container="body"
data-testid="work-item-start-date-picker"
:disabled="isDatepickerDisabled"
:input-id="$options.startDateInputId"
show-clear-button
@ -240,6 +241,7 @@ export default {
ref="dueDatePicker"
v-model="dirtyDueDate"
container="body"
data-testid="work-item-due-date-picker"
:disabled="isDatepickerDisabled"
:input-id="$options.dueDateInputId"
:min-date="dirtyStartDate"

View File

@ -266,6 +266,7 @@ export default {
:loading="isLoading"
:view-only="!canUpdate"
class="gl-flex-grow-1 gl-border gl-border-white gl-rounded-base col-9 gl-align-self-start gl-px-0! gl-mx-2!"
data-testid="work-item-labels-input"
:class="{ 'gl-hover-border-gray-200': canUpdate }"
@input="focusTokenSelector"
@text-input="debouncedSearchKeyUpdate"

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
module Ci
  # Controller concern that centralizes authorization for reading a CI job's
  # log ("build trace"). Include in any controller that serves job logs
  # (e.g. Projects::JobsController, Projects::ArtifactsController).
  #
  # Expects the including controller to provide:
  #   - `current_user` and `build` (the Ci::Build being accessed)
  #   - `can?` and `access_denied!` helpers
  module AuthBuildTrace
    extend ActiveSupport::Concern
    # Halts the request with an access-denied response unless the current
    # user holds the :read_build_trace ability on `build`.
    #
    # When the job ran with debug trace enabled, the log may contain secrets
    # (masked variables are printed in clear), so a more detailed message is
    # shown explaining the elevated permission requirement.
    def authorize_read_build_trace!
      return if can?(current_user, :read_build_trace, build)
      if build.debug_mode?
        access_denied!(
          _('You must have developer or higher permissions in the associated project to view job logs when debug ' \
            "trace is enabled. To disable debug trace, set the 'CI_DEBUG_TRACE' and 'CI_DEBUG_SERVICES' variables to " \
            "'false' in your pipeline configuration or CI/CD settings. If you must view this job log, " \
            'a project maintainer or owner must add you to the project with developer permissions or higher.')
        )
      else
        access_denied!(_('The current user is not authorized to access the job log.'))
      end
    end
  end
end

View File

@ -32,21 +32,6 @@ class Projects::ApplicationController < ApplicationController
->(project) { !project.pending_delete? }
end
def authorize_read_build_trace!
return if can?(current_user, :read_build_trace, build)
if build.debug_mode?
access_denied!(
_('You must have developer or higher permissions in the associated project to view job logs when debug trace ' \
"is enabled. To disable debug trace, set the 'CI_DEBUG_TRACE' and 'CI_DEBUG_SERVICES' variables to 'false' " \
'in your pipeline configuration or CI/CD settings. If you must view this job log, a project maintainer ' \
'or owner must add you to the project with developer permissions or higher.')
)
else
access_denied!(_('The current user is not authorized to access the job log.'))
end
end
def build_canonical_path(project)
params[:namespace_id] = project.namespace.to_param
params[:project_id] = project.to_param

View File

@ -1,6 +1,7 @@
# frozen_string_literal: true
class Projects::ArtifactsController < Projects::ApplicationController
include Ci::AuthBuildTrace
include ExtractsPath
include RendersBlob
include SendFileUpload

View File

@ -1,14 +1,15 @@
# frozen_string_literal: true
class Projects::JobsController < Projects::ApplicationController
include Ci::AuthBuildTrace
include SendFileUpload
include ContinueParams
include ProjectStatsRefreshConflictsGuard
urgency :low, [:index, :show, :trace, :retry, :play, :cancel, :unschedule, :erase, :raw]
before_action :find_job_as_build, except: [:index, :play, :show]
before_action :find_job_as_processable, only: [:play, :show]
before_action :find_job_as_build, except: [:index, :play, :show, :retry]
before_action :find_job_as_processable, only: [:play, :show, :retry]
before_action :authorize_read_build_trace!, only: [:trace, :raw]
before_action :authorize_read_build!
before_action :authorize_update_build!,
@ -76,7 +77,11 @@ class Projects::JobsController < Projects::ApplicationController
response = Ci::RetryJobService.new(project, current_user).execute(@build)
if response.success?
redirect_to build_path(response[:job])
if @build.is_a?(::Ci::Build)
redirect_to build_path(response[:job])
else
head :ok
end
else
respond_422
end

View File

@ -94,8 +94,10 @@ module Types
resolver: Resolvers::Ci::RunnerResolver,
extras: [:lookahead],
description: "Find a runner."
field :runner_platforms, resolver: Resolvers::Ci::RunnerPlatformsResolver
field :runner_setup, resolver: Resolvers::Ci::RunnerSetupResolver
field :runner_platforms, resolver: Resolvers::Ci::RunnerPlatformsResolver,
deprecated: { reason: 'No longer used, use gitlab-runner documentation to learn about supported platforms', milestone: '15.9' }
field :runner_setup, resolver: Resolvers::Ci::RunnerSetupResolver,
deprecated: { reason: 'No longer used, use gitlab-runner documentation to learn about runner registration commands', milestone: '15.9' }
field :runners, Types::Ci::RunnerType.connection_type,
null: true,
resolver: Resolvers::Ci::RunnersResolver,

View File

@ -55,7 +55,11 @@ module Ci
end
def retryable?
false
return false unless Feature.enabled?(:ci_recreate_downstream_pipeline, project)
return false if failed? && (pipeline_loop_detected? || reached_max_descendant_pipelines_depth?)
super
end
def self.with_preloads
@ -186,6 +190,10 @@ module Ci
def persisted_environment
end
def deployment_job?
false
end
def execute_hooks
raise NotImplementedError
end

View File

@ -1,21 +0,0 @@
# frozen_string_literal: true
module Clusters
  module Applications
    # Model backing the `clusters_applications_cilium` table, representing an
    # installation of the Cilium network-security application on a cluster.
    class Cilium < ApplicationRecord
      self.table_name = 'clusters_applications_cilium'
      include ::Clusters::Concerns::ApplicationCore
      include ::Clusters::Concerns::ApplicationStatus
      # Cilium can only be installed and uninstalled through the
      # cluster-applications project by triggering CI pipeline for a
      # management project. UI operations are not available for such
      # applications. More information:
      # https://docs.gitlab.com/ee/user/clusters/management_project.html
      def allowed_to_uninstall?
        false
      end
    end
  end
end

View File

@ -19,8 +19,7 @@ module Clusters
Clusters::Applications::Prometheus.application_name => Clusters::Applications::Prometheus,
Clusters::Applications::Runner.application_name => Clusters::Applications::Runner,
Clusters::Applications::Jupyter.application_name => Clusters::Applications::Jupyter,
Clusters::Applications::Knative.application_name => Clusters::Applications::Knative,
Clusters::Applications::Cilium.application_name => Clusters::Applications::Cilium
Clusters::Applications::Knative.application_name => Clusters::Applications::Knative
}.freeze
DEFAULT_ENVIRONMENT = '*'
KUBE_INGRESS_BASE_DOMAIN = 'KUBE_INGRESS_BASE_DOMAIN'
@ -64,7 +63,6 @@ module Clusters
has_one_cluster_application :runner
has_one_cluster_application :jupyter
has_one_cluster_application :knative
has_one_cluster_application :cilium
has_many :kubernetes_namespaces
has_many :metrics_dashboard_annotations, class_name: 'Metrics::Dashboard::Annotation', inverse_of: :cluster

View File

@ -991,6 +991,13 @@ class Project < ApplicationRecord
namespace.owner == user
end
def invalidate_personal_projects_count_of_owner
return unless personal?
return unless namespace.owner
namespace.owner.invalidate_personal_projects_count
end
def project_setting
super.presence || build_project_setting
end

View File

@ -125,7 +125,7 @@ module Projects
setup_authorizations
current_user.invalidate_personal_projects_count
project.invalidate_personal_projects_count_of_owner
Projects::PostCreationWorker.perform_async(@project.id)

View File

@ -34,7 +34,7 @@ module Projects
publish_project_deleted_event_for(project)
current_user.invalidate_personal_projects_count
project.invalidate_personal_projects_count_of_owner
true
rescue StandardError => error

View File

@ -32,9 +32,9 @@ module Projects
raise TransferError, s_("TransferProject|You don't have permission to transfer projects into that namespace.")
end
transfer(project)
@owner_of_personal_project_before_transfer = project.namespace.owner if project.personal?
current_user.invalidate_personal_projects_count
transfer(project)
true
rescue Projects::TransferService::TransferError => ex
@ -121,6 +121,7 @@ module Projects
# Overridden in EE
def post_update_hooks(project)
ensure_personal_project_owner_membership(project)
invalidate_personal_projects_counts
publish_event
end
@ -129,6 +130,18 @@ module Projects
def remove_paid_features
end
def invalidate_personal_projects_counts
# If the project was moved out of a personal namespace,
# the cache of the namespace owner, before the transfer, should be cleared.
if @owner_of_personal_project_before_transfer.present?
@owner_of_personal_project_before_transfer.invalidate_personal_projects_count
end
# If the project has now moved into a personal namespace,
# the cache of the target namespace owner should be cleared.
project.invalidate_personal_projects_count_of_owner
end
def transfer_missing_group_resources(group)
Labels::TransferService.new(current_user, group, project).execute

View File

@ -0,0 +1,8 @@
name: ci_recreate_downstream_pipeline
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108698
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/387972
milestone: '15.9'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -0,0 +1,13 @@
- title: 'GitLab Runner platforms and setup instructions in GraphQL API'
announcement_milestone: '15.9'
announcement_date: '2023-02-22'
removal_milestone: '17.0'
removal_date: '2024-05-22'
breaking_change: true
reporter: mrincon
body: |
The `runnerPlatforms` and `runnerSetup` queries to get GitLab Runner platforms and installation instructions
are deprecated and will be removed from the GraphQL API. For installation instructions, you should use the
[GitLab Runner documentation](https://docs.gitlab.com/runner/).
stage: verify
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/387937

View File

@ -1,7 +1,5 @@
---
table_name: clusters_applications_cilium
classes:
- Clusters::Applications::Cilium
feature_categories:
- security_policy_management
description: Information about installed instance of Cilium in the cluster

View File

@ -114,7 +114,7 @@ from [owasp.org](https://owasp.org/).
### What private and public network links support the application?
- Customers choose their own networks. As sites are intended to be
geographically separated, it is envisioned that replication traffic will pass
geographically separated, it is envisioned that replication traffic passes
over the public Internet in a typical deployment, but this is not a requirement.
## Systems
@ -168,7 +168,7 @@ from [owasp.org](https://owasp.org/).
- GitLab is "cloud native" and this applies to Geo as much as to the rest of the
product. Deployment in clouds is a common and supported scenario.
## If applicable, what approach(es) to cloud computing will be taken (Managed Hosting versus "Pure" Cloud, a "full machine" approach such as AWS-EC2 versus a "hosted database" approach such as AWS-RDS and Azure, etc)?
## If applicable, what approach(es) to cloud computing is taken (Managed Hosting versus "Pure" Cloud, a "full machine" approach such as AWS-EC2 versus a "hosted database" approach such as AWS-RDS and Azure, etc)?
- To be decided by our customers, according to their operational needs.
@ -186,7 +186,7 @@ from [owasp.org](https://owasp.org/).
- PostgreSQL >= 12, Redis, Sidekiq, Puma.
### How will database connection strings, encryption keys, and other sensitive components be stored, accessed, and protected from unauthorized detection?
### How can database connection strings, encryption keys, and other sensitive components be stored, accessed, and protected from unauthorized detection?
- There are some Geo-specific values. Some are shared secrets which must be
securely transmitted from the **primary** site to the **secondary** site at setup time. Our

View File

@ -130,7 +130,7 @@ strategy.
Administrators can enable a background job that performs housekeeping in all
repositories at a customizable interval to remedy this situation. This
background job processes all repositories hosted by a Gitaly node in a random
order and eagerly performs housekeeping tasks on them. The Gitaly node will stop
order and eagerly performs housekeeping tasks on them. The Gitaly node stops
processing repositories if it takes longer than the configured interval.
#### Configure scheduled housekeeping
@ -166,7 +166,7 @@ of a repository. When creating the first fork, we:
1. Create an object pool repository that contains all objects of the repository
that is about to be forked.
1. Link the repository to this new object pool via Git's alternates mechanism.
1. Link the repository to this new object pool via the alternates mechanism of Git.
1. Repack the repository so that it uses objects from the object pool. It thus
can drop its own copy of the objects.
@ -181,12 +181,12 @@ GitLab needs to perform special housekeeping operations in object pools:
thus maintain references to unreachable "dangling" objects so that they don't
ever get deleted.
- GitLab must update object pools regularly to pull in new objects that have
been added in the primary repository. Otherwise, an object pool will become
been added in the primary repository. Otherwise, an object pool becomes
increasingly inefficient at deduplicating objects.
These housekeeping operations are performed by the specialized
`FetchIntoObjectPool` RPC that handles all of these special tasks while also
executing the regular housekeeping tasks we execute for normal Git
executing the regular housekeeping tasks we execute for standard Git
repositories.
Object pools are getting optimized automatically whenever the primary member is

View File

@ -23,7 +23,7 @@ We have a few core goals with these packages:
GitLab in its core is a Ruby on Rails project. However, GitLab as a whole
application is more complex and has multiple components. If these components are
not present or are incorrectly configured, GitLab will not work or it will work
not present or are incorrectly configured, GitLab does not work or it works
unpredictably.
The [GitLab Architecture Overview](../../development/architecture.md#gitlab-architecture-overview) shows some of these components and how they
@ -112,4 +112,4 @@ what was noted above:
1. Running separate services in multiple containers and keeping them running
can be more complex and might not be required for a given install.
This method is useful for organizations just getting started with containers and schedulers, and may not be ready for a more complex installation. This method is a great introduction, and will work well for smaller organizations.
This method is useful for organizations just getting started with containers and schedulers, and may not be ready for a more complex installation. This method is a great introduction, and works well for smaller organizations.

View File

@ -87,7 +87,7 @@ docker.elastic.co/elasticsearch/elasticsearch:5.5.1
```
Then confirm it works in the browser at `curl "http://<IP_ADDRESS>:9200/_cat/health"`.
Elasticsearch's default username is `elastic` and password is `changeme`.
In Elasticsearch, the default username is `elastic`, and the default password is `changeme`.
### Kroki

View File

@ -414,6 +414,10 @@ Returns [`CiRunner`](#cirunner).
Supported runner platforms.
WARNING:
**Deprecated** in 15.9.
No longer used, use gitlab-runner documentation to learn about supported platforms.
Returns [`RunnerPlatformConnection`](#runnerplatformconnection).
This field returns a [connection](#connections). It accepts the
@ -424,6 +428,10 @@ four standard [pagination arguments](#connection-pagination-arguments):
Runner setup instructions.
WARNING:
**Deprecated** in 15.9.
No longer used, use gitlab-runner documentation to learn about runner registration commands.
Returns [`RunnerSetup`](#runnersetup).
#### Arguments

View File

@ -6,20 +6,20 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Avoiding downtime in migrations
When working with a database certain operations may require downtime. Since we
When working with a database certain operations may require downtime. As we
cannot have downtime in migrations we need to use a set of steps to get the
same end result without downtime. This guide describes various operations that
may appear to need downtime, their impact, and how to perform them without
requiring downtime.
## Dropping Columns
## Dropping columns
Removing columns is tricky because running GitLab processes may still be using
the columns. To work around this safely, you need three steps in three releases:
1. Ignoring the column (release M)
1. Dropping the column (release M+1)
1. Removing the ignore rule (release M+2)
1. [Ignoring the column](#ignoring-the-column-release-m) (release M)
1. [Dropping the column](#dropping-the-column-release-m1) (release M+1)
1. [Removing the ignore rule](#removing-the-ignore-rule-release-m2) (release M+2)
The reason we spread this out across three releases is that dropping a column is
a destructive operation that can't be rolled back easily.
@ -27,9 +27,9 @@ a destructive operation that can't be rolled back easily.
Following this procedure helps us to make sure there are no deployments to GitLab.com
and upgrade processes for self-managed installations that lump together any of these steps.
### Step 1: Ignoring the column (release M)
### Ignoring the column (release M)
The first step is to ignore the column in the application code. This is
The first step is to ignore the column in the application code. This step is
necessary because Rails caches the columns and re-uses this cache in various
places. This can be done by defining the columns to ignore. For example, to ignore
`updated_at` in the User model you'd use the following:
@ -50,7 +50,7 @@ ignore_columns %i[updated_at created_at], remove_with: '12.7', remove_after: '20
If the model exists in CE and EE, the column has to be ignored in the CE model. If the
model only exists in EE, then it has to be added there.
We require indication of when it is safe to remove the column ignore with:
We require indication of when it is safe to remove the column ignore rule with:
- `remove_with`: set to a GitLab release typically two releases (M+2) after adding the
column ignore.
@ -64,7 +64,7 @@ to ignore the column and subsequently remove the column ignore (which would resu
In this example, the change to ignore the column went into release 12.5.
### Step 2: Dropping the column (release M+1)
### Dropping the column (release M+1)
Continuing our example, dropping the column goes into a _post-deployment_ migration in release 12.6:
@ -74,12 +74,14 @@ Start by creating the **post-deployment migration**:
bundle exec rails g post_deployment_migration remove_users_updated_at_column
```
There are two scenarios that you need to consider
to write a migration that removes a column:
You must consider these scenarios when you write a migration that removes a column:
#### A. The removed column has no indexes or constraints that belong to it
- [The removed column has no indexes or constraints that belong to it](#the-removed-column-has-no-indexes-or-constraints-that-belong-to-it)
- [The removed column has an index or constraint that belongs to it](#the-removed-column-has-an-index-or-constraint-that-belongs-to-it)
In this case, a **transactional migration** can be used. Something as simple as:
#### The removed column has no indexes or constraints that belong to it
In this case, a **transactional migration** can be used:
```ruby
class RemoveUsersUpdatedAtColumn < Gitlab::Database::Migration[2.1]
@ -97,10 +99,10 @@ You can consider [enabling lock retries](../migration_style_guide.md#usage-with-
when you run a migration on big tables, because it might take some time to
acquire a lock on this table.
#### B. The removed column has an index or constraint that belongs to it
#### The removed column has an index or constraint that belongs to it
If the `down` method requires adding back any dropped indexes or constraints, that cannot
be done within a transactional migration, then the migration would look like this:
be done in a transactional migration. The migration would look like this:
```ruby
class RemoveUsersUpdatedAtColumn < Gitlab::Database::Migration[2.1]
@ -131,7 +133,7 @@ is used to disable the transaction that wraps the whole migration.
You can refer to the page [Migration Style Guide](../migration_style_guide.md)
for more information about database migrations.
### Step 3: Removing the ignore rule (release M+2)
### Removing the ignore rule (release M+2)
With the next release, in this example 12.7, we set up another merge request to remove the ignore rule.
This removes the `ignore_column` line and - if not needed anymore - also the inclusion of `IgnoreableColumns`.
@ -139,18 +141,24 @@ This removes the `ignore_column` line and - if not needed anymore - also the inc
This should only get merged with the release indicated with `remove_with` and once
the `remove_after` date has passed.
## Renaming Columns
## Renaming columns
Renaming columns the normal way requires downtime as an application may continue
Renaming columns the standard way requires downtime as an application may continue
to use the old column names during or after a database migration. To rename a column
without requiring downtime, we need two migrations: a regular migration and a
post-deployment migration. Both these migrations can go in the same release.
The steps:
1. [Add the regular migration](#add-the-regular-migration-release-m) (release M)
1. [Ignore the column](#ignore-the-column-release-m) (release M)
1. [Add a post-deployment migration](#add-a-post-deployment-migration-release-m) (release M)
1. [Remove the ignore rule](#remove-the-ignore-rule-release-m1) (release M+1)
NOTE:
It's not possible to rename columns with default values. For more details, see
[this merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/52032#default-values).
### Step 1: Add The Regular Migration
### Add the regular migration (release M)
First we need to create the regular migration. This migration should use
`Gitlab::Database::MigrationHelpers#rename_column_concurrently` to perform the
@ -178,7 +186,20 @@ If a column contains one or more indexes that don't contain the name of the
original column, the previously described procedure fails. In that case,
you need to rename these indexes.
### Step 2: Add A Post-Deployment Migration
### Ignore the column (release M)
The next step is to ignore the column in the application code, and make sure it is not used. This step is
necessary because Rails caches the columns and re-uses this cache in various places.
This step is similar to [the first step when column is dropped](#ignoring-the-column-release-m), and the same requirements apply.
```ruby
class User < ApplicationRecord
include IgnorableColumns
ignore_column :updated_at, remove_with: '12.7', remove_after: '2020-01-22'
end
```
### Add a post-deployment migration (release M)
The renaming procedure requires some cleaning up in a post-deployment migration.
We can perform this cleanup using
@ -202,7 +223,11 @@ end
If you're renaming a [large table](https://gitlab.com/gitlab-org/gitlab/-/blob/master/rubocop/rubocop-migrations.yml#L3), carefully consider the state when the first migration has run but the second cleanup migration hasn't been run yet.
With [Canary](https://gitlab.com/gitlab-com/gl-infra/readiness/-/tree/master/library/canary/) it is possible that the system runs in this state for a significant amount of time.
## Changing Column Constraints
### Remove the ignore rule (release M+1)
Same as when column is dropped, after the rename is completed, we need to [remove the ignore rule](#removing-the-ignore-rule-release-m2) in a subsequent release.
## Changing column constraints
Adding or removing a `NOT NULL` clause (or another constraint) can typically be
done without requiring downtime. However, this does require that any application
@ -218,14 +243,18 @@ You can check the following guides for each specific use case:
- [Adding `NOT NULL` constraints](not_null_constraints.md)
- [Adding limits to text columns](strings_and_the_text_data_type.md)
## Changing Column Types
## Changing column types
Changing the type of a column can be done using
`Gitlab::Database::MigrationHelpers#change_column_type_concurrently`. This
method works similarly to `rename_column_concurrently`. For example, let's say
we want to change the type of `users.username` from `string` to `text`.
we want to change the type of `users.username` from `string` to `text`:
### Step 1: Create A Regular Migration
1. [Create a regular migration](#create-a-regular-migration)
1. [Create a post-deployment migration](#create-a-post-deployment-migration)
1. [Casting data to a new type](#casting-data-to-a-new-type)
### Create a regular migration
A regular migration is used to create a new column with a temporary name along
with setting up some triggers to keep data in sync. Such a migration would look
@ -246,7 +275,7 @@ class ChangeUsersUsernameStringToText < Gitlab::Database::Migration[2.1]
end
```
### Step 2: Create A Post Deployment Migration
### Create a post-deployment migration
Next we need to clean up our changes using a post-deployment migration:
@ -293,13 +322,13 @@ specify the old default.
Doing this requires steps in two minor releases:
1. Add the `SafelyChangeColumnDefault` concern to the model and change the default in a post-migration.
1. Clean up the `SafelyChangeColumnDefault` concern in the next minor release.
1. [Add the `SafelyChangeColumnDefault` concern to the model](#add-the-safelychangecolumndefault-concern-to-the-model-and-change-the-default-in-a-post-migration) and change the default in a post-migration.
1. [Clean up the `SafelyChangeColumnDefault` concern](#clean-up-the-safelychangecolumndefault-concern-in-the-next-minor-release) in the next minor release.
We must wait a minor release before cleaning up the `SafelyChangeColumnDefault` because self-managed
releases bundle an entire minor release into a single zero-downtime deployment.
### Step 1: Add the `SafelyChangeColumnDefault` concern to the model and change the default in a post-migration
### Add the `SafelyChangeColumnDefault` concern to the model and change the default in a post-migration
The first step is to mark the column as safe to change in application code.
@ -333,12 +362,12 @@ You can consider [enabling lock retries](../migration_style_guide.md#usage-with-
when you run a migration on big tables, because it might take some time to
acquire a lock on this table.
### Step 2: Clean up the `SafelyChangeColumnDefault` concern in the next minor release
### Clean up the `SafelyChangeColumnDefault` concern in the next minor release
In the next minor release, create a new merge request to remove the `columns_changing_default` call. Also remove the `SafelyChangeColumnDefault` include
if it is not needed for a different column.
## Changing The Schema For Large Tables
## Changing the schema for large tables
While `change_column_type_concurrently` and `rename_column_concurrently` can be
used for changing the schema of a table without downtime, it doesn't work very
@ -354,7 +383,7 @@ down deployments.
For more information, see [the documentation on cleaning up batched background migrations](batched_background_migrations.md#cleaning-up).
## Adding Indexes
## Adding indexes
Adding indexes does not require downtime when `add_concurrent_index`
is used.
@ -362,15 +391,15 @@ is used.
See also [Migration Style Guide](../migration_style_guide.md#adding-indexes)
for more information.
## Dropping Indexes
## Dropping indexes
Dropping an index does not require downtime.
## Adding Tables
## Adding tables
This operation is safe as there's no code using the table just yet.
## Dropping Tables
## Dropping tables
Dropping tables can be done safely using a post-deployment migration, but only
if the application no longer uses the table.
@ -378,7 +407,7 @@ if the application no longer uses the table.
Add the table to [`db/docs/deleted_tables`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/db/docs/deleted_tables) using the process described in [database dictionary](database_dictionary.md#dropping-tables).
Even though the table is deleted, it is still referenced in database migrations.
## Renaming Tables
## Renaming tables
Renaming tables requires downtime as an application may continue
using the old table name during/after a database migration.
@ -389,7 +418,7 @@ table and creating a new one is the preferred way to "rename" the table.
Renaming a table is possible without downtime by following our multi-release
[rename table process](rename_database_tables.md#rename-table-without-downtime).
## Adding Foreign Keys
## Adding foreign keys
Adding foreign keys usually works in 3 steps:
@ -404,7 +433,7 @@ GitLab allows you to work around this by using
`Gitlab::Database::MigrationHelpers#add_concurrent_foreign_key`. This method
ensures that no downtime is needed.
## Removing Foreign Keys
## Removing foreign keys
This operation does not require downtime.

View File

@ -1,6 +1,6 @@
---
stage: Manage
group: Organization
stage: Create
group: Source Code
info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments"
---

View File

@ -134,31 +134,28 @@ resource.labels.pod_name:"review-qa-raise-e-12chm0-migrations"
```mermaid
graph TD
A["build-qa-image, compile-production-assets<br/>(canonical default refs only)"];
B1[start-review-app-pipeline];
B[review-build-cng];
C[review-deploy];
C["review-deploy<br><br>Helm deploys the review app using the Cloud<br/>Native images built by the CNG-mirror pipeline.<br><br>Cloud Native images are deployed to the `review-apps`<br>Kubernetes (GKE) cluster, in the GCP `gitlab-review-apps` project."];
D[CNG-mirror];
E[review-qa-smoke, review-qa-reliable];
E[review-qa-smoke, review-qa-blocking, review-qa-non-blocking<br><br>gitlab-qa runs the e2e tests against the review app.];
A -->|once the `prepare` stage is done| B
B -.->|triggers a CNG-mirror pipeline and wait for it to be done| D
D -.->|polls until completed| B
B -->|once the `review-build-cng` job is done| C
C -->|once the `review-deploy` job is done| E
A --> B1
B1 --> B
B -.->|triggers a CNG-mirror pipeline| D
D -.->|depends on the multi-project pipeline| B
B --> C
C --> E
subgraph "1. gitlab `prepare` stage"
subgraph "1. gitlab-org/gitlab parent pipeline"
A
B1
end
subgraph "2. gitlab `review-prepare` stage"
subgraph "2. gitlab-org/gitlab child pipeline"
B
end
subgraph "3. gitlab `review` stage"
C["review-deploy<br><br>Helm deploys the review app using the Cloud<br/>Native images built by the CNG-mirror pipeline.<br><br>Cloud Native images are deployed to the `review-apps`<br>Kubernetes (GKE) cluster, in the GCP `gitlab-review-apps` project."]
end
subgraph "4. gitlab `qa` stage"
E[review-qa-smoke, review-qa-reliable<br><br>gitlab-qa runs the smoke and reliable suites against the review app.]
C
E
end
subgraph "CNG-mirror pipeline"

View File

@ -25,7 +25,7 @@ Advanced Search works with the following versions of Elasticsearch.
| GitLab 13.3 - 13.8 | Elasticsearch 6.4 - 7.x |
| GitLab 12.7 - 13.2 | Elasticsearch 6.x - 7.x |
Advanced Search follows Elasticsearch's [End of Life Policy](https://www.elastic.co/support/eol).
Advanced Search follows the [Elasticsearch end-of-life policy](https://www.elastic.co/support/eol).
When we change Elasticsearch supported versions in GitLab, we announce them in [deprecation notes](https://about.gitlab.com/handbook/marketing/blog/release-posts/#deprecations) in monthly release posts
before we remove them.
@ -224,7 +224,7 @@ The following Elasticsearch settings are available:
| `Maximum file size indexed` | See [the explanation in instance limits.](../../administration/instance_limits.md#maximum-file-size-indexed). |
| `Maximum field length` | See [the explanation in instance limits.](../../administration/instance_limits.md#maximum-field-length). |
| `Maximum bulk request size (MiB)` | The Maximum Bulk Request size is used by the GitLab Golang-based indexer processes and indicates how much data it ought to collect (and store in memory) in a given indexing process before submitting the payload to Elasticsearch's Bulk API. This setting should be used with the Bulk request concurrency setting (see below) and needs to accommodate the resource constraints of both the Elasticsearch hosts and the hosts running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
| `Bulk request concurrency` | The Bulk request concurrency indicates how many of the GitLab Golang-based indexer processes (or threads) can run in parallel to collect data to subsequently submit to Elasticsearch's Bulk API. This increases indexing performance, but fills the Elasticsearch bulk requests queue faster. This setting should be used together with the Maximum bulk request size setting (see above) and needs to accommodate the resource constraints of both the Elasticsearch hosts and the hosts running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
| `Bulk request concurrency` | The Bulk request concurrency indicates how many of the GitLab Golang-based indexer processes (or threads) can run in parallel to collect data to subsequently submit to the Elasticsearch Bulk API. This increases indexing performance, but fills the Elasticsearch bulk requests queue faster. This setting should be used together with the Maximum bulk request size setting (see above) and needs to accommodate the resource constraints of both the Elasticsearch hosts and the hosts running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
| `Client request timeout` | Elasticsearch HTTP client request timeout value in seconds. `0` means using the system default timeout value, which depends on the libraries that GitLab application is built upon. |
WARNING:

View File

@ -309,26 +309,23 @@ When it comes to Elasticsearch, RAM is the key resource. Elasticsearch themselve
- Ideally, 64 GB of RAM.
For CPU, Elasticsearch recommends at least 2 CPU cores, but Elasticsearch states common
setups use up to 8 cores. For more details on server specs, check out
[Elasticsearch's hardware guide](https://www.elastic.co/guide/en/elasticsearch/guide/current/hardware.html).
setups use up to 8 cores. For more details on server specs, check out the
[Elasticsearch hardware guide](https://www.elastic.co/guide/en/elasticsearch/guide/current/hardware.html).
Beyond the obvious, sharding comes into play. Sharding is a core part of Elasticsearch.
It allows for horizontal scaling of indices, which is helpful when you are dealing with
a large amount of data.
With the way GitLab does indexing, there is a **huge** amount of documents being
indexed. By utilizing sharding, you can speed up Elasticsearch's ability to locate
data, since each shard is a Lucene index.
indexed. By using sharding, you can speed up the ability of Elasticsearch to locate
data because each shard is a Lucene index.
If you are not using sharding, you are likely to hit issues when you start using
Elasticsearch in a production environment.
Keep in mind that an index with only one shard has **no scale factor** and will
likely encounter issues when called upon with some frequency.
If you need to know how many shards, read
[Elasticsearch's documentation on capacity planning](https://www.elastic.co/guide/en/elasticsearch/guide/2.x/capacity-planning.html),
as the answer is not straight forward.
An index with only one shard has **no scale factor** and is likely
to encounter issues when called upon with some frequency. See the
[Elasticsearch documentation on capacity planning](https://www.elastic.co/guide/en/elasticsearch/guide/2.x/capacity-planning.html).
The easiest way to determine if sharding is in use is to check the output of the
[Elasticsearch Health API](https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-health.html):

View File

@ -66,6 +66,22 @@ Due to limited customer usage, Browser Performance Testing is deprecated and wil
<div class="deprecation removal-170 breaking-change">
### GitLab Runner platforms and setup instructions in GraphQL API
Planned removal: GitLab <span class="removal-milestone">17.0</span> <span class="removal-date"></span>
WARNING:
This is a [breaking change](https://docs.gitlab.com/ee/development/deprecation_guidelines/).
Review the details carefully before upgrading.
The `runnerPlatforms` and `runnerSetup` queries to get GitLab Runner platforms and installation instructions
are deprecated and will be removed from the GraphQL API. For installation instructions, you should use the
[GitLab Runner documentation](https://docs.gitlab.com/runner/).
</div>
<div class="deprecation removal-170 breaking-change">
### Load Performance Testing is deprecated
Planned removal: GitLab <span class="removal-milestone">17.0</span> <span class="removal-date"></span>

View File

@ -23,7 +23,7 @@ repository being scanned. There are two kinds of customization:
## Disable predefined rules
You can disable predefined rules for any SAST analyzer. Disabled rules won't appear
You can disable predefined rules for any SAST analyzer. Disabled rules don't appear
on the [Pipeline Security](../index.md#view-security-scan-information-in-the-pipeline-security-tab)
tab or the [Vulnerability Report](../index.md#view-security-scan-information-in-the-vulnerability-report).
@ -103,7 +103,7 @@ differ based on the kind of configuration you're making.
| `[[$analyzer.ruleset]]` | Predefined rules | Defines modifications to an existing rule. |
| `interpolate` | All | If set to `true`, you can use `$VAR` in the configuration to evaluate environment variables. Use this feature with caution, so you don't leak secrets or tokens. (Default: `false`) |
| `description` | Passthroughs | Description of the custom ruleset. |
| `targetdir` | Passthroughs | The directory where the final configuration should be persisted. If empty, a directory with a random name is created. The directory can contain up to 100MB of files. |
| `targetdir` | Passthroughs | The directory where the final configuration should be persisted. If empty, a directory with a random name is created. The directory can contain up to 100 MB of files. |
| `validate` | Passthroughs | If set to `true`, the content of each passthrough is validated. The validation works for `yaml`, `xml`, `json` and `toml` content. The proper validator is identified based on the extension used in the `target` parameter of the `[[$analyzer.passthrough]]` section. (Default: `false`) |
| `timeout` | Passthroughs | The maximum time to spend to evaluate the passthrough chain, before timing out. The timeout cannot exceed 300 seconds. (Default: 60) |
@ -249,13 +249,13 @@ a higher precedence and can overwrite or append to data yielded by previous
passthroughs (depending on the `mode`). This is useful for cases where you need
to use or modify an existing configuration.
The amount of data generated by a single passthrough is limited to 1MB.
The amount of data generated by a single passthrough is limited to 1 MB.
| Setting | Applies to | Description |
| ------- | ---------- | ----------- |
| `type` | All | One of `file`, `raw`, `git` or `url`. |
| `target` | All | The target file to contain the data written by the passthrough evaluation. If empty, a random filename is used. |
| `mode` | All | If `overwrite`, the `target` file is overwritten. If `append`, new content is appended to the `target` file. Note that the `git` type only supports `overwrite`. (Default: `overwrite`) |
| `mode` | All | If `overwrite`, the `target` file is overwritten. If `append`, new content is appended to the `target` file. The `git` type only supports `overwrite`. (Default: `overwrite`) |
| `ref` | `type = "git"` | Contains the name of the branch or the SHA to pull. When using a branch name, specify it in the form `refs/heads/<branch>`, not `refs/remotes/<remote_name>/<branch>`. |
| `subdir` | `type = "git"` | Used to select a subdirectory of the Git repository as the configuration source. |
| `value` | All | For the `file`, `url`, and `git` types, defines the location of the file or Git repository. For the `raw` type, contains the inline configuration. |
@ -273,7 +273,7 @@ The amount of data generated by a single passthrough is limited to 1MB.
WARNING:
When using the `raw` passthrough with a YAML snippet, it's recommended to format all indentation
in the `sast-ruleset.toml` file as spaces. The YAML specification mandates spaces over tabs, and the
analyzer will fail to parse your custom ruleset unless the indentation is represented accordingly.
analyzer fails to parse your custom ruleset unless the indentation is represented accordingly.
## Examples
@ -317,8 +317,7 @@ With the following custom ruleset configuration, the following rules are omitted
### Override predefined rules of SAST analyzers
With the following custom ruleset configuration, vulnerabilities found with
`semgrep` with a type `CWE` and a value `322` will have their severity
overridden to `Critical`.
`semgrep` with a type `CWE` and a value `322` have their severity overridden to `Critical`.
```toml
[semgrep]
@ -416,7 +415,7 @@ Different passthrough types are demonstrated in this example:
- The `sast-rules` entry has a higher precedence because it appears later in
the configuration.
- If there's a filename collision between the two checkouts, files
from the `sast-rules` repository will overwrite files from the
from the `sast-rules` repository overwrite files from the
`myrules` repository.
- A `raw` passthrough, which writes its `value` to `/sgrules/insecure.yml`.
- A `url` passthrough, which fetches a configuration hosted at a URL and

View File

@ -150,9 +150,9 @@ in your CI/CD configuration file after you include the [`Secret-Detection.gitlab
You can set the tag to:
- A major version, like `4`. Your pipelines will use any minor or patch updates that are released within this major version.
- A minor version, like `4.5`. Your pipelines will use any patch updates that are released within this minor version.
- A patch version, like `4.5.0`. Your pipelines won't receive any updates.
- A major version, like `4`. Your pipelines use any minor or patch updates that are released within this major version.
- A minor version, like `4.5`. Your pipelines use any patch updates that are released within this minor version.
- A patch version, like `4.5.0`. Your pipelines don't receive any updates.
This example uses a specific minor version of the analyzer:

View File

@ -57,7 +57,7 @@ To reduce false negatives in [dependency scans](../../../user/application_securi
The project Security Dashboard shows the total number of vulnerabilities
over time, with up to 365 days of historical data. Data refresh begins daily at 01:15 UTC via a scheduled job.
Each refresh captures a snapshot of open vulnerabilities. Data is not backported to prior days
so vulnerabilities opened after the job has already run for the day will not be reflected in the
so vulnerabilities opened after the job has already run for the day cannot be reflected in the
counts until the following day's refresh job.
Project Security Dashboards show statistics for all vulnerabilities with a current status of `Needs triage` or `Confirmed`.
@ -105,7 +105,7 @@ To view project security status for a group:
1. Select **Security > Security Dashboard**.
Each project is assigned a letter [grade](#project-vulnerability-grades) according to the highest-severity open vulnerability.
Dismissed or resolved vulnerabilities are excluded. Each project can receive only one letter grade and will appear only once
Dismissed or resolved vulnerabilities are excluded. Each project can receive only one letter grade and appears only once
in the Project security status report.
To view vulnerabilities, go to the group's [vulnerability report](../vulnerability_report/index.md).

View File

@ -243,7 +243,7 @@ of the finding's [first identifier](https://gitlab.com/gitlab-org/security-produ
combine to create the value.
Examples of primary identifiers include `PluginID` for OWASP Zed Attack Proxy (ZAP), or `CVE` for
Trivy. Note that the identifier must be stable. Subsequent scans must return the same value for the
Trivy. The identifier must be stable. Subsequent scans must return the same value for the
same finding, even if the location has slightly changed.
### Report finding

View File

@ -108,7 +108,7 @@ certificate authority that is unknown to the agent.
To fix this issue, you can present the CA certificate file to the agent
by using a Kubernetes `configmap` and mount the file in the agent `/etc/ssl/certs` directory from where it
will be picked up automatically.
is picked up automatically.
For example, if your internal CA certificate is `myCA.pem`:
@ -200,7 +200,7 @@ are stored in the repository where the agent is configured.
```
The GitLab agent performs vulnerability scans by creating a job to scan each workload. If a scan
is interrupted, these jobs may be left behind and will need to be cleaned up before more jobs can
is interrupted, these jobs may be left behind and need to be cleaned up before more jobs can
be run. You can clean up these jobs by running:
```shell

View File

@ -52,7 +52,7 @@ Product Analytics uses several tools:
> - Moved to be behind the [feature flag](../../administration/feature_flags.md) named `product_analytics_admin_settings` in GitLab 15.7. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available per project or for your entire instance, ask an administrator to [enable the feature flag](../../administration/feature_flags.md) named `cube_api_proxy`.
On self-managed GitLab, by default this feature is not available. To make it available per project or for your entire instance, ask an administrator to [enable the feature flag](../../administration/feature_flags.md) named `product_analytics_admin_settings`.
On GitLab.com, this feature is not available.
This feature is not ready for production use.
@ -86,7 +86,7 @@ Prerequisite:
> Introduced in GitLab 15.5 behind the [feature flag](../../administration/feature_flags.md) named `product_analytics_internal_preview`. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available per project or for your entire instance, ask an administrator to [enable the feature flag](../../administration/feature_flags.md) named `cube_api_proxy`.
On self-managed GitLab, by default this feature is not available. To make it available per project or for your entire instance, ask an administrator to [enable the feature flag](../../administration/feature_flags.md) named `product_analytics_internal_preview`.
On GitLab.com, this feature is not available.
This feature is not ready for production use.

View File

@ -161,7 +161,20 @@ README.md @user1
The Code Owner for `README.md` would be `@user2`.
If you use sections, the last user _for each section_ is used.
If you use sections, the last pattern matching the file for each section is used.
For example, in a `CODEOWNERS` file using sections:
```plaintext
[README Owners]
README.md @user1 @user2
internal/README.md @user4
[README other owners]
README.md @user3
```
The Code Owners for the `README.md` in the root directory are `@user1`, `@user2`,
and `@user3`. The Code Owners for `internal/README.md` are `@user4` and `@user3`.
Only one CODEOWNERS pattern can match per file path.

View File

@ -6,11 +6,13 @@ module Gitlab
module Bridge
class Factory < Status::Factory
def self.extended_statuses
[[Status::Bridge::Failed],
[[Status::Bridge::Retryable],
[Status::Bridge::Failed],
[Status::Bridge::Manual],
[Status::Bridge::WaitingForResource],
[Status::Bridge::Play],
[Status::Bridge::Action]]
[Status::Bridge::Action],
[Status::Bridge::Retried]]
end
def self.common_helpers

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Status
      module Bridge
        # Extended status for a bridge (trigger) job that has been retried.
        # Carries no bridge-specific behaviour of its own: everything is
        # inherited from Status::Build::Retried, so retried bridges are
        # handled the same way as retried builds by the status factory.
        class Retried < Status::Build::Retried
        end
      end
    end
  end
end

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Status
      module Bridge
        # Extended status for a bridge (trigger) job that can be retried.
        # Defines no bridge-specific behaviour: the retry affordance (e.g.
        # exposing a retry action) is inherited unchanged from
        # Status::Build::Retryable.
        class Retryable < Status::Build::Retryable
        end
      end
    end
  end
end

View File

@ -403,8 +403,8 @@ namespace :gitlab do
desc 'Generate database docs yaml'
task generate: :environment do
FileUtils.mkdir_p(DB_DOCS_PATH) unless Dir.exist?(DB_DOCS_PATH)
FileUtils.mkdir_p(EE_DICTIONARY_PATH) unless Dir.exist?(EE_DICTIONARY_PATH)
FileUtils.mkdir_p(DB_DOCS_PATH)
FileUtils.mkdir_p(EE_DICTIONARY_PATH) if Gitlab.ee?
Rails.application.eager_load!

View File

@ -5152,9 +5152,6 @@ msgstr ""
msgid "Approved"
msgstr ""
msgid "Approved MRs"
msgstr ""
msgid "Approved the current merge request."
msgstr ""
@ -5448,9 +5445,6 @@ msgstr ""
msgid "AsanaService|User Personal Access Token. User must have access to the task. All comments are attributed to this user."
msgstr ""
msgid "Ascending"
msgstr ""
msgid "Ask again later"
msgstr ""
@ -8971,9 +8965,6 @@ msgstr ""
msgid "Closed %{epicTimeagoDate}"
msgstr ""
msgid "Closed MRs"
msgstr ""
msgid "Closed date"
msgstr ""
@ -11075,12 +11066,27 @@ msgstr ""
msgid "ContributionAnalytics|%{created} created, %{merged} merged, %{closed} closed."
msgstr ""
msgid "ContributionAnalytics|%{pushes}, more than %{commits} by %{contributors}."
msgid "ContributionAnalytics|%{pushes} by %{contributors}."
msgstr ""
msgid "ContributionAnalytics|Approved MRs"
msgstr ""
msgid "ContributionAnalytics|Closed MRs"
msgstr ""
msgid "ContributionAnalytics|Closed issues"
msgstr ""
msgid "ContributionAnalytics|Contribution analytics for issues, merge requests and push events since %{start_date}"
msgstr ""
msgid "ContributionAnalytics|Contributions per group member"
msgstr ""
msgid "ContributionAnalytics|Failed to load the contribution stats"
msgstr ""
msgid "ContributionAnalytics|Issues"
msgstr ""
@ -11093,9 +11099,18 @@ msgstr ""
msgid "ContributionAnalytics|Last week"
msgstr ""
msgid "ContributionAnalytics|Loading contribution stats for group members"
msgstr ""
msgid "ContributionAnalytics|Merge requests"
msgstr ""
msgid "ContributionAnalytics|Merged MRs"
msgstr ""
msgid "ContributionAnalytics|Name"
msgstr ""
msgid "ContributionAnalytics|No issues for the selected time period."
msgstr ""
@ -11105,6 +11120,15 @@ msgstr ""
msgid "ContributionAnalytics|No pushes for the selected time period."
msgstr ""
msgid "ContributionAnalytics|Opened MRs"
msgstr ""
msgid "ContributionAnalytics|Opened issues"
msgstr ""
msgid "ContributionAnalytics|Pushed"
msgstr ""
msgid "ContributionAnalytics|The given date range is larger than 31 days"
msgstr ""
@ -11114,10 +11138,10 @@ msgstr ""
msgid "ContributionAnalytics|There is too much data to calculate. Try lowering the period_limit setting in the insights configuration file."
msgstr ""
msgid "Contributions for %{calendar_date}"
msgid "ContributionAnalytics|Total Contributions"
msgstr ""
msgid "Contributions per group member"
msgid "Contributions for %{calendar_date}"
msgstr ""
msgid "Contributor"
@ -14095,9 +14119,6 @@ msgstr ""
msgid "Deprioritize label"
msgstr ""
msgid "Descending"
msgstr ""
msgid "Describe the goal of the changes and what reviewers should be aware of."
msgstr ""
@ -14840,9 +14861,6 @@ msgstr ""
msgid "Download codes"
msgstr ""
msgid "Download evidence JSON"
msgstr ""
msgid "Download export"
msgstr ""
@ -25258,9 +25276,6 @@ msgstr ""
msgid "Loading %{name}"
msgstr ""
msgid "Loading contribution stats for group members"
msgstr ""
msgid "Loading files, directories, and submodules in the path %{path} for commit reference %{ref}"
msgstr ""
@ -26516,9 +26531,6 @@ msgstr ""
msgid "Merged"
msgstr ""
msgid "Merged MRs"
msgstr ""
msgid "Merged branches are being deleted. This can take some time depending on the number of branches. Please refresh the page to see changes."
msgstr ""
@ -29379,6 +29391,9 @@ msgstr ""
msgid "Open errors"
msgstr ""
msgid "Open evidence JSON in new tab"
msgstr ""
msgid "Open in Gitpod"
msgstr ""
@ -29418,12 +29433,6 @@ msgstr ""
msgid "Opened"
msgstr ""
msgid "Opened MRs"
msgstr ""
msgid "Opened issues"
msgstr ""
msgid "OpenedNDaysAgo|Created"
msgstr ""
@ -34619,9 +34628,6 @@ msgstr ""
msgid "PushRule|Reject unverified users"
msgstr ""
msgid "Pushed"
msgstr ""
msgid "Pushes"
msgstr ""
@ -39906,9 +39912,6 @@ msgstr ""
msgid "Something went wrong while fetching details"
msgstr ""
msgid "Something went wrong while fetching group member contributions"
msgstr ""
msgid "Something went wrong while fetching latest comments."
msgstr ""
@ -44443,9 +44446,6 @@ msgstr ""
msgid "Total"
msgstr ""
msgid "Total Contributions"
msgstr ""
msgid "Total Score"
msgstr ""

View File

@ -228,7 +228,7 @@
"cheerio": "^1.0.0-rc.9",
"commander": "^2.20.3",
"custom-jquery-matchers": "^2.1.0",
"eslint": "8.28.0",
"eslint": "8.29.0",
"eslint-import-resolver-jest": "3.0.2",
"eslint-import-resolver-webpack": "0.13.2",
"eslint-plugin-import": "^2.26.0",

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, feature_category: :continuous_integration do
include ApiHelpers
include HttpIOHelpers
@ -809,14 +809,48 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
sign_in(user)
end
context 'when job is not retryable' do
context 'and the job is a bridge' do
let(:job) { create(:ci_bridge, :failed, :reached_max_descendant_pipelines_depth, pipeline: pipeline) }
it 'renders unprocessable_entity' do
post_retry
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
context 'and the job is a build' do
let(:job) { create(:ci_build, :deployment_rejected, pipeline: pipeline) }
it 'renders unprocessable_entity' do
post_retry
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
context 'when job is retryable' do
let(:job) { create(:ci_build, :retryable, pipeline: pipeline) }
context 'and the job is a bridge' do
let(:job) { create(:ci_bridge, :retryable, pipeline: pipeline) }
it 'redirects to the retried job page' do
post_retry
it 'responds :ok' do
post_retry
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_job_path(id: Ci::Build.last.id))
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'and the job is a build' do
let(:job) { create(:ci_build, :retryable, pipeline: pipeline) }
it 'redirects to the retried job page' do
post_retry
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_job_path(id: Ci::Build.last.id))
end
end
shared_examples_for 'retried job has the same attributes' do
@ -847,16 +881,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
context 'when job is not retryable' do
let(:job) { create(:ci_build, pipeline: pipeline) }
it 'renders unprocessable_entity' do
post_retry
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
def post_retry
post :retry, params: {
namespace_id: project.namespace,

View File

@ -33,6 +33,14 @@ FactoryBot.define do
end
end
trait :retried do
retried { true }
end
trait :retryable do
success
end
trait :created do
status { 'created' }
end

View File

@ -125,9 +125,5 @@ FactoryBot.define do
oauth_application factory: :oauth_application
cluster factory: %i(cluster with_installed_helm provided_by_gcp project)
end
factory :clusters_applications_cilium, class: 'Clusters::Applications::Cilium' do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
end
end

View File

@ -100,7 +100,6 @@ FactoryBot.define do
application_runner factory: %i(clusters_applications_runner installed)
application_jupyter factory: %i(clusters_applications_jupyter installed)
application_knative factory: %i(clusters_applications_knative installed)
application_cilium factory: %i(clusters_applications_cilium installed)
end
trait :with_domain do

View File

@ -5,56 +5,21 @@ require 'spec_helper'
RSpec.describe 'Work item', :js, feature_category: :team_planning do
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:work_item) { create(:work_item, project: project) }
context 'for signed in user' do
before do
project.add_developer(user)
project.add_developer(other_user)
sign_in(user)
visit project_work_items_path(project, work_items_path: work_item.id)
end
context 'in work item description' do
it 'shows GFM autocomplete', :aggregate_failures do
click_button "Edit description"
find('[aria-label="Description"]').send_keys("@#{user.username}")
wait_for_requests
page.within('.atwho-container') do
expect(page).to have_text(user.name)
end
end
it 'shows conflict message when description changes', :aggregate_failures do
click_button "Edit description"
scroll_to(find('[aria-label="Description"]'))
# without this for some reason the test fails when running locally
sleep 1
::WorkItems::UpdateService.new(
project: work_item.project,
current_user: other_user,
params: { description: "oh no!" }
).execute(work_item)
work_item.reload
find('[aria-label="Description"]').send_keys("oh yeah!")
warning = 'Someone edited the description at the same time you did.'
expect(page.find('[data-testid="work-item-description-conflicts"]')).to have_text(warning)
click_button "Save and overwrite"
expect(page.find('[data-testid="work-item-description"]')).to have_text("oh yeah!")
end
end
it_behaves_like 'work items status'
it_behaves_like 'work items assignees'
it_behaves_like 'work items labels'
it_behaves_like 'work items comments'
it_behaves_like 'work items description'
end
end

View File

@ -36,47 +36,18 @@ describe('SignInPage', () => {
});
it.each`
jiraConnectOauthEnabled | jiraConnectOauthSelfManagedEnabled | shouldRenderDotCom | shouldRenderMultiversion
${false} | ${false} | ${true} | ${false}
${false} | ${true} | ${true} | ${false}
${true} | ${false} | ${true} | ${false}
${true} | ${true} | ${false} | ${true}
jiraConnectOauthEnabled | shouldRenderDotCom | shouldRenderMultiversion
${false} | ${true} | ${false}
${false} | ${true} | ${false}
${true} | ${false} | ${true}
${true} | ${false} | ${true}
`(
'renders correct component when jiraConnectOauth is $jiraConnectOauthEnabled and jiraConnectOauthSelfManaged is $jiraConnectOauthSelfManagedEnabled',
({
jiraConnectOauthEnabled,
jiraConnectOauthSelfManagedEnabled,
shouldRenderDotCom,
shouldRenderMultiversion,
}) => {
createComponent({ jiraConnectOauthEnabled, jiraConnectOauthSelfManagedEnabled });
'renders correct component when jiraConnectOauth is $jiraConnectOauthEnabled',
({ jiraConnectOauthEnabled, shouldRenderDotCom, shouldRenderMultiversion }) => {
createComponent({ jiraConnectOauthEnabled });
expect(findSignInGitlabCom().exists()).toBe(shouldRenderDotCom);
expect(findSignInGitabMultiversion().exists()).toBe(shouldRenderMultiversion);
},
);
describe('when jiraConnectOauthSelfManaged is false', () => {
beforeEach(() => {
createComponent({ jiraConnectOauthSelfManaged: false, props: { hasSubscriptions: true } });
});
it('renders SignInGitlabCom with correct props', () => {
expect(findSignInGitlabCom().props()).toEqual({ hasSubscriptions: true });
});
describe('when error event is emitted', () => {
it('emits another error event', () => {
findSignInGitlabCom().vm.$emit('error');
expect(wrapper.emitted('error')).toHaveLength(1);
});
});
describe('when sign-in-oauth event is emitted', () => {
it('emits another sign-in-oauth event', () => {
findSignInGitlabCom().vm.$emit('sign-in-oauth');
expect(wrapper.emitted('sign-in-oauth')[0]).toEqual([]);
});
});
});
});

View File

@ -82,6 +82,10 @@ describe('date_format_utility.js', () => {
);
});
it('defaults to 00:00 if no time is provided', () => {
expect(utils.dateAndTimeToISOString(new Date('2021-08-21'))).toBe('2021-08-21T00:00:00.000Z');
});
it('throws if date in invalid', () => {
expect(() => utils.dateAndTimeToISOString('Invalid date', '10:00')).toThrow(
'Argument should be a Date instance',

View File

@ -1,4 +1,4 @@
import { GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
@ -35,78 +35,23 @@ describe('ProjectsDropdown', () => {
);
};
const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
const findDropdownItemByIndex = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
const findNoResults = () => wrapper.findByTestId('empty-result-message');
const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
afterEach(() => {
wrapper.destroy();
spyFetchProjects.mockReset();
});
describe('No projects found', () => {
beforeEach(() => {
createComponent('_non_existent_project_');
});
it('renders empty results message', () => {
expect(findNoResults().text()).toBe('No matching results');
});
it('shows GlSearchBoxByType with default attributes', () => {
expect(findSearchBoxByType().exists()).toBe(true);
expect(findSearchBoxByType().vm.$attrs).toMatchObject({
placeholder: 'Search projects',
});
});
});
describe('Search term is empty', () => {
beforeEach(() => {
createComponent('');
});
it('renders all projects when search term is empty', () => {
expect(findAllDropdownItems()).toHaveLength(3);
expect(findDropdownItemByIndex(0).text()).toBe('_project_1_');
expect(findDropdownItemByIndex(1).text()).toBe('_project_2_');
expect(findDropdownItemByIndex(2).text()).toBe('_project_3_');
});
it('should not be selected on the inactive project', () => {
expect(wrapper.vm.isSelected('_project_1_')).toBe(false);
});
});
describe('Projects found', () => {
beforeEach(() => {
createComponent('_project_1_', { targetProjectId: '1' });
});
it('renders only the project searched for', () => {
expect(findAllDropdownItems()).toHaveLength(1);
expect(findDropdownItemByIndex(0).text()).toBe('_project_1_');
});
it('should not display empty results message', () => {
expect(findNoResults().exists()).toBe(false);
});
it('should signify this project is selected', () => {
expect(findDropdownItemByIndex(0).props('isChecked')).toBe(true);
});
it('should signify the project is not selected', () => {
expect(wrapper.vm.isSelected('_not_selected_project_')).toBe(false);
});
describe('Custom events', () => {
it('should emit selectProject if a project is clicked', () => {
findDropdownItemByIndex(0).vm.$emit('click');
findDropdown().vm.$emit('select', '1');
expect(wrapper.emitted('selectProject')).toEqual([['1']]);
expect(wrapper.vm.filterTerm).toBe('_project_1_');
});
});
});
@ -117,8 +62,7 @@ describe('ProjectsDropdown', () => {
});
it('renders only the project searched for', () => {
expect(findAllDropdownItems()).toHaveLength(1);
expect(findDropdownItemByIndex(0).text()).toBe('_project_1_');
expect(findDropdown().props('items')).toEqual([{ text: '_project_1_', value: '1' }]);
});
});
});

View File

@ -40,13 +40,11 @@ describe('Evidence Block', () => {
});
it('renders the correct hover text for the download', () => {
expect(wrapper.findComponent(GlLink).attributes('title')).toBe('Download evidence JSON');
expect(wrapper.findComponent(GlLink).attributes('title')).toBe('Open evidence JSON in new tab');
});
it('renders the correct file link for download', () => {
expect(wrapper.findComponent(GlLink).attributes().download).toMatch(
/v1\.1-evidences-[0-9]+\.json/,
);
it('renders a link that opens in a new tab', () => {
expect(wrapper.findComponent(GlLink).attributes().target).toBe('_blank');
});
describe('sha text', () => {

View File

@ -0,0 +1,27 @@
import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ReportWidgetContainer from '~/vue_merge_request_widget/components/report_widget_container.vue';
describe('app/assets/javascripts/vue_merge_request_widget/components/report_widget_container.vue', () => {
let wrapper;
const createComponent = ({ slot } = {}) => {
wrapper = mountExtended(ReportWidgetContainer, {
slots: {
default: slot,
},
});
};
it('hides the container when children has no content', async () => {
createComponent({ slot: `<span><b></b></span>` });
await nextTick();
expect(wrapper.isVisible()).toBe(false);
});
it('shows the container when children have no content', async () => {
createComponent({ slot: `<span><b>test</b></span>` });
await nextTick();
expect(wrapper.isVisible()).toBe(true);
});
});

View File

@ -40,7 +40,8 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
it 'matches correct extended statuses' do
expect(factory.extended_statuses)
.to eq [Gitlab::Ci::Status::Bridge::Failed]
.to eq [Gitlab::Ci::Status::Bridge::Retryable,
Gitlab::Ci::Status::Bridge::Failed]
end
it 'fabricates a failed bridge status' do
@ -54,7 +55,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
expect(status.label).to be_nil
expect(status.status_tooltip).to eq "#{s_('CiStatusText|failed')} - (unknown failure)"
expect(status).not_to have_details
expect(status).not_to have_action
expect(status).to have_action
end
context 'failed with downstream_pipeline_creation_failed' do
@ -136,6 +137,30 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
end
end
context 'when the bridge is successful and therefore retryable' do
let(:bridge) { create(:ci_bridge, :success) }
it 'matches correct core status' do
expect(factory.core_status).to be_a Gitlab::Ci::Status::Success
end
it 'matches correct extended statuses' do
expect(factory.extended_statuses)
.to eq [Gitlab::Ci::Status::Bridge::Retryable]
end
it 'fabricates a retryable build status' do
expect(status).to be_a Gitlab::Ci::Status::Bridge::Retryable
end
it 'fabricates status with correct details' do
expect(status.text).to eq s_('CiStatusText|passed')
expect(status.icon).to eq 'status_success'
expect(status.favicon).to eq 'favicon_status_success'
expect(status).to have_action
end
end
private
def create_bridge(*traits)

View File

@ -37,8 +37,18 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
describe '#retryable?' do
let(:bridge) { create(:ci_bridge, :success) }
it 'returns false' do
expect(bridge.retryable?).to eq(false)
it 'returns true' do
expect(bridge.retryable?).to eq(true)
end
context 'without ci_recreate_downstream_pipeline ff' do
before do
stub_feature_flags(ci_recreate_downstream_pipeline: false)
end
it 'returns false' do
expect(bridge.retryable?).to eq(false)
end
end
end
@ -590,4 +600,10 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
expect(bridge.metadata.partition_id).to eq(ci_testing_partition_id)
end
end
describe '#deployment_job?' do
subject { bridge.deployment_job? }
it { is_expected.to eq(false) }
end
end

View File

@ -287,6 +287,12 @@ RSpec.describe Ci::Processable do
end
end
context 'when the processable is a bridge' do
subject(:processable) { create(:ci_bridge, pipeline: pipeline) }
it_behaves_like 'retryable processable'
end
context 'when the processable is a build' do
subject(:processable) { create(:ci_build, pipeline: pipeline) }

View File

@ -1,17 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Clusters::Applications::Cilium do
let(:cilium) { create(:clusters_applications_cilium) }
include_examples 'cluster application core specs', :clusters_applications_cilium
include_examples 'cluster application status specs', :clusters_applications_cilium
include_examples 'cluster application initial status specs'
describe '#allowed_to_uninstall?' do
subject { cilium.allowed_to_uninstall? }
it { is_expected.to be false }
end
end

View File

@ -920,6 +920,29 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
describe '#invalidate_personal_projects_count_of_owner' do
context 'for personal projects' do
let_it_be(:namespace_user) { create(:user) }
let_it_be(:project) { create(:project, namespace: namespace_user.namespace) }
it 'invalidates personal_project_count cache of the the owner of the personal namespace' do
expect(Rails.cache).to receive(:delete).with(['users', namespace_user.id, 'personal_projects_count'])
project.invalidate_personal_projects_count_of_owner
end
end
context 'for projects in groups' do
let_it_be(:project) { create(:project, namespace: create(:group)) }
it 'does not invalidates any cache' do
expect(Rails.cache).not_to receive(:delete)
project.invalidate_personal_projects_count_of_owner
end
end
end
describe '#default_pipeline_lock' do
let(:project) { build_stubbed(:project) }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Ci::RetryJobService do
RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
using RSpec::Parameterized::TableSyntax
let_it_be(:reporter) { create(:user) }
let_it_be(:developer) { create(:user) }
@ -27,6 +27,22 @@ RSpec.describe Ci::RetryJobService do
project.add_reporter(reporter)
end
shared_context 'retryable bridge' do
let_it_be(:downstream_project) { create(:project, :repository) }
let_it_be_with_refind(:job) do
create(:ci_bridge, :success,
pipeline: pipeline, downstream: downstream_project, description: 'a trigger job', ci_stage: stage
)
end
let_it_be(:job_to_clone) { job }
before do
job.update!(retried: false)
end
end
shared_context 'retryable build' do
let_it_be_with_reload(:job) do
create(:ci_build, :success, pipeline: pipeline, ci_stage: stage)
@ -102,6 +118,14 @@ RSpec.describe Ci::RetryJobService do
end
end
shared_examples_for 'does not retry the job' do
it 'returns :not_retryable and :unprocessable_entity' do
expect(subject.message).to be('Job cannot be retried')
expect(subject.payload[:reason]).to eq(:not_retryable)
expect(subject.payload[:job]).to eq(job)
end
end
shared_examples_for 'retries the job' do
it_behaves_like 'clones the job'
@ -189,6 +213,20 @@ RSpec.describe Ci::RetryJobService do
expect { service.clone!(create(:ci_build).present) }.to raise_error(TypeError)
end
context 'when the job to be cloned is a bridge' do
include_context 'retryable bridge'
it_behaves_like 'clones the job'
context 'when given variables' do
let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
it 'does not give variables to the new bridge' do
expect { new_job }.not_to raise_error
end
end
end
context 'when the job to be cloned is a build' do
include_context 'retryable build'
@ -287,7 +325,33 @@ RSpec.describe Ci::RetryJobService do
subject { service.execute(job) }
context 'when the job to be retried is a bridge' do
context 'and it is not retryable' do
let_it_be(:job) { create(:ci_bridge, :failed, :reached_max_descendant_pipelines_depth) }
it_behaves_like 'does not retry the job'
end
include_context 'retryable bridge'
it_behaves_like 'retries the job'
context 'when given variables' do
let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
it 'does not give variables to the new bridge' do
expect { new_job }.not_to raise_error
end
end
end
context 'when the job to be retried is a build' do
context 'and it is not retryable' do
let_it_be(:job) { create(:ci_build, :deployment_rejected, pipeline: pipeline) }
it_behaves_like 'does not retry the job'
end
include_context 'retryable build'
it_behaves_like 'retries the job'

View File

@ -163,7 +163,7 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :projects
describe 'after create actions' do
it 'invalidate personal_projects_count caches' do
expect(user).to receive(:invalidate_personal_projects_count)
expect(Rails.cache).to receive(:delete).with(['users', user.id, 'personal_projects_count'])
create_project(user, opts)
end

View File

@ -151,10 +151,22 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
it_behaves_like 'deleting the project'
it 'invalidates personal_project_count cache' do
expect(user).to receive(:invalidate_personal_projects_count)
context 'personal projects count cache' do
context 'when the executor is the creator of the project itself' do
it 'invalidates personal_project_count cache of the the owner of the personal namespace' do
expect(user).to receive(:invalidate_personal_projects_count)
destroy_project(project, user, {})
destroy_project(project, user, {})
end
end
context 'when the executor is the instance administrator', :enable_admin_mode do
it 'invalidates personal_project_count cache of the the owner of the personal namespace' do
expect(user).to receive(:invalidate_personal_projects_count)
destroy_project(project, create(:admin), {})
end
end
end
context 'with running pipelines' do

View File

@ -126,6 +126,12 @@ RSpec.describe Projects::TransferService do
expect(project.namespace).to eq(user.namespace)
end
it 'invalidates personal_project_count cache of the the owner of the personal namespace' do
expect(user).to receive(:invalidate_personal_projects_count)
execute_transfer
end
context 'the owner of the namespace does not have a direct membership in the project residing in the group' do
it 'creates a project membership record for the owner of the namespace, with OWNER access level, after the transfer' do
execute_transfer
@ -161,6 +167,17 @@ RSpec.describe Projects::TransferService do
end
end
context 'personal namespace -> group', :enable_admin_mode do
let(:executor) { create(:admin) }
it 'invalidates personal_project_count cache of the the owner of the personal namespace' \
'that previously held the project' do
expect(user).to receive(:invalidate_personal_projects_count)
execute_transfer
end
end
context 'when transfer succeeds' do
before do
group.add_owner(user)

View File

@ -0,0 +1,109 @@
# frozen_string_literal: true
RSpec.shared_examples 'work items status' do
let(:state_selector) { '[data-testid="work-item-state-select"]' }
it 'sucessfully shows and changes the status of the work item' do
expect(find(state_selector)).to have_content 'Open'
find(state_selector).select("Closed")
wait_for_requests
expect(find(state_selector)).to have_content 'Closed'
expect(work_item.reload.state).to eq('closed')
end
end
RSpec.shared_examples 'work items comments' do
let(:form_selector) { '[data-testid="work-item-add-comment"]' }
it 'sucessfully creates and shows comments' do
click_button 'Add a comment'
find(form_selector).fill_in(with: "Test comment")
click_button "Comment"
wait_for_requests
expect(page).to have_content "Test comment"
end
end
RSpec.shared_examples 'work items assignees' do
it 'sucessfully assigns the current user by searching' do
# The button is only when the mouse is over the input
find('[data-testid="work-item-asssignees-input"]').fill_in(with: user.username)
wait_for_requests
# submit and simulate blur to save
send_keys(:enter)
find("body").click
wait_for_requests
expect(work_item.assignees).to include(user)
end
end
RSpec.shared_examples 'work items labels' do
it 'sucessfully assigns a label' do
label = create(:label, project: work_item.project, title: "testing-label")
find('[data-testid="work-item-labels-input"]').fill_in(with: label.title)
wait_for_requests
# submit and simulate blur to save
send_keys(:enter)
find("body").click
wait_for_requests
expect(work_item.labels).to include(label)
end
end
RSpec.shared_examples 'work items description' do
it 'shows GFM autocomplete', :aggregate_failures do
click_button "Edit description"
find('[aria-label="Description"]').send_keys("@#{user.username}")
wait_for_requests
page.within('.atwho-container') do
expect(page).to have_text(user.name)
end
end
context 'on conflict' do
let_it_be(:other_user) { create(:user) }
let(:expected_warning) { 'Someone edited the description at the same time you did.' }
before do
project.add_developer(other_user)
end
it 'shows conflict message when description changes', :aggregate_failures do
click_button "Edit description"
wait_for_requests
::WorkItems::UpdateService.new(
project: work_item.project,
current_user: other_user,
params: { description: "oh no!" }
).execute(work_item)
wait_for_requests
find('[aria-label="Description"]').send_keys("oh yeah!")
expect(page.find('[data-testid="work-item-description-conflicts"]')).to have_text(expected_warning)
click_button "Save and overwrite"
expect(page.find('[data-testid="work-item-description"]')).to have_text("oh yeah!")
end
end
end

View File

@ -5683,10 +5683,10 @@ eslint-visitor-keys@^3.3.0:
resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826"
integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==
eslint@8.28.0:
version "8.28.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.28.0.tgz#81a680732634677cc890134bcdd9fdfea8e63d6e"
integrity sha512-S27Di+EVyMxcHiwDrFzk8dJYAaD+/5SoWKxL1ri/71CRHsnJnRDPNt2Kzj24+MT9FDupf4aqqyqPrvI8MvQ4VQ==
eslint@8.29.0:
version "8.29.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.29.0.tgz#d74a88a20fb44d59c51851625bc4ee8d0ec43f87"
integrity sha512-isQ4EEiyUjZFbEKvEGJKKGBwXtvXX+zJbkVKCgTuB9t/+jUBcy8avhkEwWJecI15BkRkOYmvIM5ynbhRjEkoeg==
dependencies:
"@eslint/eslintrc" "^1.3.3"
"@humanwhocodes/config-array" "^0.11.6"