Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-01-04 15:20:28 +00:00
parent ac72b79188
commit 7de116050a
65 changed files with 701 additions and 938 deletions

View File

@ -1,6 +1,6 @@
include:
- project: gitlab-org/quality/pipeline-common
ref: 8.3.1
ref: 8.3.2
file:
- /ci/danger-review.yml

View File

@ -283,7 +283,6 @@ Layout/ArrayAlignment:
- 'spec/lib/gitlab/usage_data/topology_spec.rb'
- 'spec/lib/gitlab/utils/merge_hash_spec.rb'
- 'spec/lib/gitlab/visibility_level_spec.rb'
- 'spec/models/application_setting_spec.rb'
- 'spec/models/ci/bridge_spec.rb'
- 'spec/models/ci/build_spec.rb'
- 'spec/models/ci/group_spec.rb'

View File

@ -99,7 +99,6 @@ Rails/FilePath:
- 'spec/lib/gitlab/feature_categories_spec.rb'
- 'spec/lib/gitlab/file_hook_spec.rb'
- 'spec/lib/gitlab/jwt_authenticatable_spec.rb'
- 'spec/lib/gitlab/legacy_http_spec.rb'
- 'spec/lib/gitlab/mail_room/mail_room_spec.rb'
- 'spec/lib/gitlab/middleware/multipart/handler_spec.rb'
- 'spec/lib/gitlab/multi_destination_logger_spec.rb'

View File

@ -1842,7 +1842,6 @@ RSpec/ContextWording:
- 'spec/lib/gitlab/kubernetes/default_namespace_spec.rb'
- 'spec/lib/gitlab/kubernetes/kube_client_spec.rb'
- 'spec/lib/gitlab/legacy_github_import/client_spec.rb'
- 'spec/lib/gitlab/legacy_http_spec.rb'
- 'spec/lib/gitlab/lfs/client_spec.rb'
- 'spec/lib/gitlab/lfs_token_spec.rb'
- 'spec/lib/gitlab/lograge/custom_options_spec.rb'

View File

@ -236,7 +236,6 @@ RSpec/ExpectInHook:
- 'spec/lib/gitlab/kas/client_spec.rb'
- 'spec/lib/gitlab/kubernetes/kube_client_spec.rb'
- 'spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb'
- 'spec/lib/gitlab/legacy_http_spec.rb'
- 'spec/lib/gitlab/memory/instrumentation_spec.rb'
- 'spec/lib/gitlab/memory/jemalloc_spec.rb'
- 'spec/lib/gitlab/metrics/boot_time_tracker_spec.rb'

View File

@ -96,7 +96,6 @@ RSpec/InstanceVariable:
- 'spec/lib/gitlab/git/repository_spec.rb'
- 'spec/lib/gitlab/import_export/group/tree_restorer_spec.rb'
- 'spec/lib/gitlab/import_export/project/tree_restorer_spec.rb'
- 'spec/lib/gitlab/legacy_http_spec.rb'
- 'spec/lib/gitlab/patch/prependable_spec.rb'
- 'spec/lib/gitlab/popen_spec.rb'
- 'spec/lib/gitlab/project_transfer_spec.rb'

View File

@ -2560,7 +2560,6 @@ Style/InlineDisableAnnotation:
- 'lib/gitlab/legacy_github_import/importer.rb'
- 'lib/gitlab/legacy_github_import/issuable_formatter.rb'
- 'lib/gitlab/legacy_github_import/user_formatter.rb'
- 'lib/gitlab/legacy_http.rb'
- 'lib/gitlab/lets_encrypt/client.rb'
- 'lib/gitlab/lfs_token.rb'
- 'lib/gitlab/local_and_remote_storage_migration/base_migrater.rb'

View File

@ -1 +1 @@
3.1.4
3.2.2

View File

@ -200,6 +200,7 @@
"WorkItemWidgetNotifications",
"WorkItemWidgetProgress",
"WorkItemWidgetRequirementLegacy",
"WorkItemWidgetRolledupDates",
"WorkItemWidgetStartAndDueDate",
"WorkItemWidgetStatus",
"WorkItemWidgetTestReports",

View File

@ -107,7 +107,7 @@ export default {
</script>
<template>
<div class="gl-ml-7">
<div>
<refs-list
v-if="hasBranches"
:has-containing-refs="hasContainingBranches"

View File

@ -1,6 +1,6 @@
<script>
import { GlCollapse, GlBadge, GlButton, GlIcon, GlSkeletonLoader } from '@gitlab/ui';
import { CONTAINING_COMMIT, FETCH_CONTAINING_REFS_EVENT } from '../constants';
import { CONTAINING_COMMIT, FETCH_CONTAINING_REFS_EVENT, BRANCHES_REF_TYPE } from '../constants';
export default {
name: 'RefsList',
@ -55,6 +55,9 @@ export default {
isLoadingRefs() {
return this.isLoading && !this.containingRefs.length;
},
refIcon() {
return this.refType === BRANCHES_REF_TYPE ? 'branch' : 'tag';
},
},
methods: {
toggleCollapse() {
@ -75,7 +78,8 @@ export default {
</script>
<template>
<div class="gl-pt-4">
<div class="gl-p-5 gl-border-b gl-border-gray-50">
<gl-icon :name="refIcon" :size="14" class="gl-ml-2 gl-mr-3" />
<span data-testid="title" class="gl-mr-2">{{ namespace }}</span>
<gl-badge
v-for="ref in tippingRefs"

View File

@ -67,6 +67,7 @@ export const TOKEN_TITLE_AUTHOR = __('Author');
export const TOKEN_TITLE_CONFIDENTIAL = __('Confidential');
export const TOKEN_TITLE_CONTACT = s__('Crm|Contact');
export const TOKEN_TITLE_GROUP = __('Group');
export const TOKEN_TITLE_GROUP_INVITE = __('Group invite');
export const TOKEN_TITLE_LABEL = __('Label');
export const TOKEN_TITLE_PROJECT = __('Project');
export const TOKEN_TITLE_MILESTONE = __('Milestone');
@ -90,6 +91,7 @@ export const TOKEN_TYPE_AUTHOR = 'author';
export const TOKEN_TYPE_CONFIDENTIAL = 'confidential';
export const TOKEN_TYPE_CONTACT = 'contact';
export const TOKEN_TYPE_GROUP = 'group';
export const TOKEN_TYPE_GROUP_INVITE = 'group-invite';
export const TOKEN_TYPE_EPIC = 'epic';
// As health status gets reused between issue lists and boards
// this is in the shared constants. Until we have not decoupled the EE filtered search bar

View File

@ -160,7 +160,6 @@ module ProjectAuthorizations
end
def publish_removed_event
return if ::Feature.disabled?(:user_approval_rules_removal)
return if @removed_user_ids.none?
events = @affected_project_ids.flat_map do |project_id|

View File

@ -33,7 +33,8 @@ module WorkItems
current_user_todos: 15,
award_emoji: 16,
linked_items: 17,
color: 18 # EE-only
color: 18, # EE-only
rolledup_dates: 19 # EE-only
}
def self.available_widgets

View File

@ -36,7 +36,7 @@
%span.cgray= n_('parent', 'parents', @commit.parents.count)
- @commit.parents.each do |parent|
= link_to parent.short_id, project_commit_path(@project, parent), class: "commit-sha"
#js-commit-branches-and-tags{ data: { full_path: @project.full_path, commit_sha: @commit.short_id } }
#js-commit-branches-and-tags{ data: { full_path: @project.full_path, commit_sha: @commit.short_id } }
.well-segment.merge-request-info
.icon-container

View File

@ -1,8 +0,0 @@
---
name: use_gitlab_http_v2
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132742
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/426549
milestone: '16.5'
type: development
group: group::pipeline authoring
default_enabled: true

View File

@ -1,8 +0,0 @@
---
name: user_approval_rules_removal
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/138691
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/434141
milestone: '16.7'
type: development
group: group::code review
default_enabled: true

View File

@ -35,7 +35,6 @@ Rails.autoloaders.each do |autoloader|
'html_parser' => 'HTMLParser',
'html_gitlab' => 'HTMLGitlab',
'http' => 'HTTP',
'legacy_http' => 'LegacyHTTP',
'http_connection_adapter' => 'HTTPConnectionAdapter',
'http_clone_enabled_check' => 'HTTPCloneEnabledCheck',
'hangouts_chat_http_override' => 'HangoutsChatHTTPOverride',

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillOwaspTopTenOfVulnerabilityReads
description: Backfills owasp_top_10 column for vulnerability_reads table.
feature_category: vulnerability_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/139544
milestone: '16.8'
queued_migration_version: 20231214111617
finalize_after: '2024-1-10'

View File

@ -0,0 +1,49 @@
# frozen_string_literal: true
class AddWorkItemsRolledupDatesWidget < Gitlab::Database::Migration[2.2]
milestone '16.8'
class WorkItemType < MigrationRecord
self.table_name = 'work_item_types'
end
class WidgetDefinition < MigrationRecord
self.table_name = 'work_item_widget_definitions'
end
restrict_gitlab_migration gitlab_schema: :gitlab_main
disable_ddl_transaction!
WIDGET_NAME = 'Rolledup dates'
WIDGET_ENUM_VALUE = 19
WORK_ITEM_TYPES = ['Epic'].freeze
def up
widgets = WORK_ITEM_TYPES.each_with_object([]) do |type_name, result|
type = WorkItemType.find_by_name_and_namespace_id(type_name, nil)
unless type
Gitlab::AppLogger.warn("type #{type_name} is missing, not adding widget")
next
end
result << {
work_item_type_id: type.id,
name: WIDGET_NAME,
widget_type: WIDGET_ENUM_VALUE
}
end
return if widgets.empty?
WidgetDefinition.upsert_all(
widgets,
unique_by: :index_work_item_widget_definitions_on_default_witype_and_name
)
end
def down
WidgetDefinition.where(name: WIDGET_NAME).delete_all
end
end
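As a hedged sketch only: after this migration runs, the new definition should be visible through the application-level model. The model name `WorkItems::WidgetDefinition` is an assumption for illustration and is not part of this diff; the name and enum value come from the migration above.

```ruby
# Hypothetical console check (assumes the app model WorkItems::WidgetDefinition exists):
# the widget name and enum value 19 match WIDGET_NAME / WIDGET_ENUM_VALUE above.
WorkItems::WidgetDefinition.where(name: 'Rolledup dates', widget_type: 19).exists?
```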

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
class QueueBackfillOwaspTopTenOfVulnerabilityReads < Gitlab::Database::Migration[2.2]
milestone '16.8'
MIGRATION = "BackfillOwaspTopTenOfVulnerabilityReads"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 10000
SUB_BATCH_SIZE = 50
restrict_gitlab_migration gitlab_schema: :gitlab_main
disable_ddl_transaction!
def up
queue_batched_background_migration(
MIGRATION,
:vulnerability_reads,
:vulnerability_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :vulnerability_reads, :vulnerability_id, [])
end
end

View File

@ -0,0 +1 @@
7c587bfbe8349640c9766d41a3c287bd27d8bc03d7a3f17d9a47c49f31bed2d8

View File

@ -0,0 +1 @@
4acc75ab443f518a7340cea213f5803dee10c39f48d07b8a2e284f71d4c1eedd

View File

@ -5443,6 +5443,7 @@ Input type: `MemberRoleUpdateInput`
| <a id="mutationmemberroleupdatedescription"></a>`description` | [`String`](#string) | Description of the member role. |
| <a id="mutationmemberroleupdateid"></a>`id` | [`MemberRoleID!`](#memberroleid) | ID of the member role to mutate. |
| <a id="mutationmemberroleupdatename"></a>`name` | [`String`](#string) | Name of the member role. |
| <a id="mutationmemberroleupdatepermissions"></a>`permissions` | [`[MemberRolePermission!]`](#memberrolepermission) | List of all customizable permissions. |
#### Fields
@ -29233,6 +29234,24 @@ Represents a legacy requirement widget.
| <a id="workitemwidgetrequirementlegacylegacyiid"></a>`legacyIid` **{warning-solid}** | [`Int`](#int) | **Deprecated** in 15.9. Use Work Item IID instead. |
| <a id="workitemwidgetrequirementlegacytype"></a>`type` | [`WorkItemWidgetType`](#workitemwidgettype) | Widget type. |
### `WorkItemWidgetRolledupDates`
Represents the rolledup dates widget.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="workitemwidgetrolledupdatesduedate"></a>`dueDate` | [`Date`](#date) | Due date for the work item. |
| <a id="workitemwidgetrolledupdatesduedateisfixed"></a>`dueDateIsFixed` | [`Boolean`](#boolean) | Indicates if the due date for the work item is fixed. |
| <a id="workitemwidgetrolledupdatesduedatesourcingmilestone"></a>`dueDateSourcingMilestone` | [`Milestone`](#milestone) | Indicates which milestone sources the rolledup due date. |
| <a id="workitemwidgetrolledupdatesduedatesourcingworkitem"></a>`dueDateSourcingWorkItem` | [`WorkItem`](#workitem) | Indicates which work_item sources the rolledup due date. |
| <a id="workitemwidgetrolledupdatesstartdate"></a>`startDate` | [`Date`](#date) | Start date for the work item. |
| <a id="workitemwidgetrolledupdatesstartdateisfixed"></a>`startDateIsFixed` | [`Boolean`](#boolean) | Indicates if the start date for the work item is fixed. |
| <a id="workitemwidgetrolledupdatesstartdatesourcingmilestone"></a>`startDateSourcingMilestone` | [`Milestone`](#milestone) | Indicates which milestone sources the rolledup start date. |
| <a id="workitemwidgetrolledupdatesstartdatesourcingworkitem"></a>`startDateSourcingWorkItem` | [`WorkItem`](#workitem) | Indicates which work_item sources the rolledup start date. |
| <a id="workitemwidgetrolledupdatestype"></a>`type` | [`WorkItemWidgetType`](#workitemwidgettype) | Widget type. |
### `WorkItemWidgetStartAndDueDate`
Represents a start and due date widget.
@ -32134,6 +32153,7 @@ Type of a work item widget.
| <a id="workitemwidgettypenotifications"></a>`NOTIFICATIONS` | Notifications widget. |
| <a id="workitemwidgettypeprogress"></a>`PROGRESS` | Progress widget. |
| <a id="workitemwidgettyperequirement_legacy"></a>`REQUIREMENT_LEGACY` | Requirement Legacy widget. |
| <a id="workitemwidgettyperolledup_dates"></a>`ROLLEDUP_DATES` | Rolledup Dates widget. |
| <a id="workitemwidgettypestart_and_due_date"></a>`START_AND_DUE_DATE` | Start And Due Date widget. |
| <a id="workitemwidgettypestatus"></a>`STATUS` | Status widget. |
| <a id="workitemwidgettypetest_reports"></a>`TEST_REPORTS` | Test Reports widget. |
@ -33820,6 +33840,7 @@ Implementations:
- [`WorkItemWidgetNotifications`](#workitemwidgetnotifications)
- [`WorkItemWidgetProgress`](#workitemwidgetprogress)
- [`WorkItemWidgetRequirementLegacy`](#workitemwidgetrequirementlegacy)
- [`WorkItemWidgetRolledupDates`](#workitemwidgetrolledupdates)
- [`WorkItemWidgetStartAndDueDate`](#workitemwidgetstartandduedate)
- [`WorkItemWidgetStatus`](#workitemwidgetstatus)
- [`WorkItemWidgetTestReports`](#workitemwidgettestreports)

View File

@ -2020,7 +2020,8 @@ The `assignee` column is deprecated. We now show it as a single-sized array `ass
Promotes an issue to an epic by adding a comment with the `/promote`
[quick action](../user/project/quick_actions.md).
For more information about promoting issues to epics, see [Manage epics](../user/group/epics/manage_epics.md#promote-an-issue-to-an-epic).
For more information about promoting issues to epics, see
[Promote an issue to an epic](../user/project/issues/managing_issues.md#promote-an-issue-to-an-epic).
```plaintext
POST /projects/:id/issues/:issue_iid/notes
```
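For illustration, a hedged Ruby sketch of calling this endpoint with the `/promote` quick action as the note body; the host, project ID, issue IID, and token variable are placeholder assumptions, not part of this change.

```ruby
# Illustrative only: promote issue 42 in project 123 by posting a `/promote` note.
# gitlab.example.com and GITLAB_TOKEN are assumptions for the example.
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/123/issues/42/notes')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')
request.set_form_data('body' => '/promote')

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code
```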

View File

@ -738,5 +738,5 @@ instance.
To share the cache between concurrent runners, you can either:
- Use the `[runners.docker]` section of the runners' `config.toml` to configure a single mount point on the host that
is mapped to `/cache` in each container, preventing the runner from creating unique volume names.
is mapped to `/cache` in each container, preventing the runner from creating unique volume names.
- Use a distributed cache.

View File

@ -276,7 +276,7 @@ include:
To use DAST on the default branch:
1. Set up a new [service](#create-an-ecs-service). This service will be used to deploy a temporary
DAST environment.
DAST environment.
1. Use the `CI_AWS_ECS_SERVICE` variable to set the name.
1. Set the scope to the `dast-default` environment.
1. Add the following to your `.gitlab-ci.yml` file:

View File

@ -853,7 +853,7 @@ This indicates the GitLab Runner does not have permission to start the
1. Check that `privileged = true` is set in the `config.toml`.
1. Make sure the CI job has the right Runner tags to use these
privileged runners.
privileged runners.
### Error: `cgroups: cgroup mountpoint does not exist: unknown`

View File

@ -108,7 +108,7 @@ To create a static environment, in your `.gitlab-ci.yml` file:
1. Define a job in the `deploy` stage.
1. In the job, define the environment `name` and `url`. If an
environment of that name doesn't exist when the pipeline runs, it is created.
environment of that name doesn't exist when the pipeline runs, it is created.
NOTE:
Some characters cannot be used in environment names. For more information about the

View File

@ -39,7 +39,7 @@ In the `.gitlab-ci.yml` file, you can define:
- [Create your first `.gitlab-ci.yml` file](quick_start/index.md).
- [View all the possible keywords that you can use in the `.gitlab-ci.yml` file](yaml/index.md).
the configuration.
the configuration.
- Use the [pipeline editor](pipeline_editor/index.md) to edit or [visualize](pipeline_editor/index.md#visualize-ci-configuration)
your CI/CD configuration.

View File

@ -108,7 +108,7 @@ The following are example projects that demonstrate review app configuration:
Other examples of review apps:
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
[Cloud Native Development with GitLab](https://www.youtube.com/watch?v=jfIyQEwrocw).
[Cloud Native Development with GitLab](https://www.youtube.com/watch?v=jfIyQEwrocw).
- [Review apps for Android](https://about.gitlab.com/blog/2020/05/06/how-to-create-review-apps-for-android-with-gitlab-fastlane-and-appetize-dot-io/).
## Route Maps

View File

@ -33,7 +33,7 @@ When you use SaaS runners:
- The VM is active only for the duration of the job and immediately deleted. This means that any changes that your job makes to the virtual machine will not be available to a subsequent job.
- The virtual machine where your job runs has `sudo` access with no password.
- The storage is shared by the operating system, the image with pre-installed software, and a copy of your cloned repository.
This means that the available free disk space for your jobs to use is reduced.
This means that the available free disk space for your jobs to use is reduced.
NOTE:
Jobs handled by SaaS runners on GitLab.com **time out after 3 hours**, regardless of the timeout configured in a project.

View File

@ -39,7 +39,7 @@ The new runner registration workflow has the following benefits:
- Preserved ownership records for runners, and minimized impact on users.
- The addition of a unique system ID ensures that you can reuse the same authentication token across
multiple runners. For more information, see [Reusing a GitLab Runner configuration](https://docs.gitlab.com/runner/fleet_scaling/#reusing-a-gitlab-runner-configuration).
multiple runners. For more information, see [Reusing a GitLab Runner configuration](https://docs.gitlab.com/runner/fleet_scaling/#reusing-a-gitlab-runner-configuration).
## Estimated time frame for planned changes
@ -61,7 +61,7 @@ To avoid a broken workflow, you must:
1. [Create a shared runner](runners_scope.md#create-a-shared-runner-with-a-runner-authentication-token) and obtain the authentication token.
1. Replace the registration token in your runner registration workflow with the
authentication token.
authentication token.
## Using registration tokens after GitLab 17.0
@ -159,7 +159,9 @@ Several runner configuration options cannot be set during runner registration. T
The following configuration options are no longer supported in [`values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/blob/main/values.yaml):
```yaml
## All these fields are DEPRECATED and the runner WILL FAIL TO START if you specify them
## All these fields are DEPRECATED and the runner WILL FAIL TO START with GitLab Runner 18.0 and later if you specify them.
## If a runner authentication token is specified in runnerRegistrationToken, the registration will succeed, however the
## other values will be ignored.
runnerRegistrationToken: ""
locked: true
tags: ""

View File

@ -576,13 +576,16 @@ A runner can have one of the following statuses.
As an administrator, you can view runner statistics to learn about the performance of your runner fleet.
- The **Median job queued time** value is calculated by sampling the queue duration of the
The **Median job queued time** value is calculated by sampling the queue duration of the
most recent 100 jobs that were run by Instance runners. Jobs from only the latest 5000
runners are considered.
- The median is a value that falls into the 50th percentile: half of the jobs
The median is a value that falls into the 50th percentile: half of the jobs
queued for longer than the median value, and half of the jobs queued for less than the
median value.
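As a minimal sketch of the 50th-percentile idea described above (the sample durations below are made up and not taken from GitLab data):

```ruby
# Illustrative only: median of sampled job queue durations, in seconds.
def median(durations)
  sorted = durations.sort
  mid = sorted.length / 2
  sorted.length.odd? ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2.0
end

puts median([3, 12, 7, 45, 9]) # => 9
```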
To view runner statistics:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **CI/CD > Runners**.
1. Select **View metrics**.

View File

@ -967,4 +967,4 @@ As a workaround you can either:
- Use [File-type](#use-file-type-cicd-variables) CI/CD variables for large environment variables where possible.
- If a single large variable is larger than `ARG_MAX`, try using [Secure Files](../secure_files/index.md), or
bring the file to the job through some other mechanism.
bring the file to the job through some other mechanism.

View File

@ -1634,7 +1634,7 @@ use the new cache, instead of rebuilding the dependencies.
**Additional details**:
- The cache `key` is a SHA computed from the most recent commits
that changed each listed file.
that changed each listed file.
If neither file is changed in any commits, the fallback key is `default`.
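A rough Ruby sketch of that rule, assuming a local Git checkout; this is not GitLab's implementation, only the "SHA of the most recent commits that touched each listed file, otherwise `default`" idea, and the file names are hypothetical `cache:key:files` entries.

```ruby
# Illustrative sketch: derive a cache key from the last commit touching each file.
require 'digest'

files = %w[Gemfile.lock package-lock.json] # hypothetical cache:key:files entries
commits = files.map { |f| `git log -1 --format=%H -- #{f}`.strip }.reject(&:empty?)

key = commits.any? ? Digest::SHA256.hexdigest(commits.join('-')) : 'default'
puts key
```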
##### `cache:key:prefix`

View File

@ -32,13 +32,13 @@ or [`ee/lib/ee/gitlab/ci/config/entry`](https://gitlab.com/gitlab-org/gitlab/-/t
An entry is represented by a class that inherits from:
- `Entry::Node`: for simple keywords.
(e.g. [`Entry::Stage`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/config/entry/stage.rb))
(e.g. [`Entry::Stage`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/config/entry/stage.rb))
- `Entry::Simplifiable`: for keywords that have multiple structures.
For example, [`Entry::Retry`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/config/entry/retry.rb) can be a simple number or a hash configuration.
For example, [`Entry::Retry`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/config/entry/retry.rb) can be a simple number or a hash configuration.
- `Entry::ComposableArray`: for keywords that have a list of single-type sub-elements.
For example, [`Entry::Includes`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/config/entry/includes.rb) has a list of `Entry::Include` elements.
For example, [`Entry::Includes`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/config/entry/includes.rb) has a list of `Entry::Include` elements.
- `Entry::ComposableHash`: for keywords that have single-type sub-elements with user-defined keys.
For example, [`Entry::Variables`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/config/entry/variables.rb) has a list of `Entry::Variable` elements with user-defined keys.
For example, [`Entry::Variables`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/config/entry/variables.rb) has a list of `Entry::Variable` elements with user-defined keys.
### Helper Classes

View File

@ -138,7 +138,7 @@ is planned to add the ability to create a MR from here.
### Events
- `done` - Emitted after the file has been committed. Use this to redirect the
user to the pipeline, for example.
user to the pipeline, for example.
### Template file location

View File

@ -53,7 +53,7 @@ and they serve us and our users well. Some examples of these principles are that
If a job fails and we notify a user that it was successful, it can have severe negative consequences.
- Feedback needs to be available when a user needs it and data cannot disappear unexpectedly when engineers need it.
- It all doesn't matter if the platform is not secure and we
are leaking credentials or secrets.
are leaking credentials or secrets.
- When a user provides a set of preconditions in a form of CI/CD configuration, the result should be deterministic each time a pipeline runs, because otherwise the platform might not be trustworthy.
- If it is fast, simple to use and has a great UX it will serve our users well.

View File

@ -24,7 +24,7 @@ This structure allows the group to think through a proposed change, gather feedb
### Design Documents
When the work ahead may affect more than a single group, stage or potentially an entirement department (for example, all of the Frontend team) then it is likely that there is need for a [Design Document](https://about.gitlab.com/handbook/engineering/architecture/workflow/).
When the work ahead may affect more than a single group, stage or potentially an entire department (for example, all of the Frontend team) then it is likely that there is need for a [Design Document](https://about.gitlab.com/handbook/engineering/architecture/workflow/).
This is well documented in the handbook, but to touch on it shortly, it is **the best way** to propose large changes and gather the required feedback and support to move forward. These documents are version controlled, keep evolving with time and are a great way to share a complex understanding across the entire organization. They also require a coach, which is a great way to involve someone with a lot of experience with larger changes. This process is shared across all engineering departments and is owned by the CTO.
@ -57,9 +57,9 @@ Very small changes may have a very broad impact. For example, a change to any ES
For recommending certain code patterns in our documentation, you can write the MR that apply your proposed change, share it broadly with the department and if no strong objections are raised, merge your change. This is more efficient than RFCs because of the bias for action, while also gathering all the feedback necessary for everyone to feel included.
If you'd like to propose a major change to the technological stack (Vue to React, JavaScript to TypeScript, etc.), start by reaching out on Slack to gauge interest. Always ask yourself whether or not the problems that you see can be fixed from our current tech stack, as we should always try to fix our problems with the tools we already have. Other departments, such as Backend and QA, do not have a clear process to propose technological changes either. That is because these changes would require huge investements from the company and probably cannot be decided without involving high-ranking executives from engineering.
If you'd like to propose a major change to the technological stack (Vue to React, JavaScript to TypeScript, etc.), start by reaching out on Slack to gauge interest. Always ask yourself whether or not the problems that you see can be fixed from our current tech stack, as we should always try to fix our problems with the tools we already have. Other departments, such as Backend and QA, do not have a clear process to propose technological changes either. That is because these changes would require huge investments from the company and probably cannot be decided without involving high-ranking executives from engineering.
Instead, consider starting a Design Document that explains the problem and try to solve it with our current tools. Invite contribution from the department and research this thoroughly as there can only be two outcomes. Either the problem **can** be solved with our current tools or it cannot. If it can, this is a huge with for our teams since we've fixed and issue without the need to completly change our stack, and if it cannot, then the Design Document can be the start of the larger conversation around the technological change.
Instead, consider starting a Design Document that explains the problem and try to solve it with our current tools. Invite contribution from the department and research this thoroughly as there can only be two outcomes. Either the problem **can** be solved with our current tools or it cannot. If it can, this is a huge win for our teams since we've fixed an issue without the need to completely change our stack, and if it cannot, then the Design Document can be the start of the larger conversation around the technological change.
## Widget Architecture

View File

@ -22,14 +22,14 @@ subdomain. You can set the GitLab Pages hostname:
As `/etc/hosts` don't support wildcard hostnames, you must configure one entry
for GitLab Pages, and then one entry for each page site:
```plaintext
127.0.0.1 gdk.test # If you're using GDK
127.0.0.1 pages.gdk.test # Pages host
# Any namespace/group/user needs to be added
# as a subdomain to the pages host. This is because
# /etc/hosts doesn't accept wildcards
127.0.0.1 root.pages.gdk.test # for the root pages
```
```plaintext
127.0.0.1 gdk.test # If you're using GDK
127.0.0.1 pages.gdk.test # Pages host
# Any namespace/group/user needs to be added
# as a subdomain to the pages host. This is because
# /etc/hosts doesn't accept wildcards
127.0.0.1 root.pages.gdk.test # for the root pages
```
### With DNS wildcard alternatives
@ -151,8 +151,8 @@ GitLab Pages access control is disabled by default. To enable it:
1. Create an [Instance-wide OAuth application](../../integration/oauth_provider.md#create-an-instance-wide-application)
with the `api` scope.
1. Set the value of your `redirect-uri` to the `pages-domain` authorization endpoint
(for example, `http://pages.gdk.test:3010/auth`).
The `redirect-uri` must not contain any GitLab Pages site domain.
(for example, `http://pages.gdk.test:3010/auth`).
The `redirect-uri` must not contain any GitLab Pages site domain.
1. Add the auth client configuration:

View File

@ -716,9 +716,9 @@ We also run tests with a single database in nightly scheduled pipelines, and in
Single database tests run in two modes:
1. **Single database with one connection**. Where GitLab connects to all the tables using one connection pool.
This runs through all the jobs that end with `-single-db`
This runs through all the jobs that end with `-single-db`
1. **Single database with two connections**. Where GitLab connects to `gitlab_main`, `gitlab_ci` database tables
using different database connections. This runs through all the jobs that end with `-single-db-ci-connection`.
using different database connections. This runs through all the jobs that end with `-single-db-ci-connection`.
If you want to force tests to run with a single database, you can add the `pipeline:run-single-db` label to the merge request.

View File

@ -445,7 +445,7 @@ projects, only one of the following tags should be added to a job:
- `gitlab-org`: Jobs randomly use privileged and unprivileged runners.
- `gitlab-org-docker`: Jobs must use a privileged runner. If you need [Docker-in-Docker support](../../ci/docker/using_docker_build.md#use-docker-in-docker),
use `gitlab-org-docker` instead of `gitlab-org`.
use `gitlab-org-docker` instead of `gitlab-org`.
The `gitlab-org-docker` tag is added by the `.use-docker-in-docker` job
definition above.

View File

@ -88,10 +88,10 @@ As for the artifacts, the GitLab Runner attempts to upload them three times, aft
To address the above two scenarios, it is advised to do the following prior to upgrading:
1. Plan your maintenance.
1. Pause your runners or block new jobs from starting by adding following to your `/etc/gitlab/gitlab.rb`:
1. Pause your runners, or block new jobs from starting by adding the following to your `/etc/gitlab/gitlab.rb`:
```ruby
nginx['custom_gitlab_server_config'] = "location /api/v4/jobs/request {\n deny all;\n return 503;\n}\n"
nginx['custom_gitlab_server_config'] = "location ^~ /api/v4/jobs/request {\n deny all;\n return 503;\n}\n"
```
And reconfigure GitLab with:

View File

@ -11,10 +11,6 @@ to them.
## Create an epic
> - The New Epic form [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/211533) in GitLab 13.2.
> - In [GitLab 13.7](https://gitlab.com/gitlab-org/gitlab/-/issues/229621) and later, the New Epic button on the Epics list opens the New Epic form.
> - In [GitLab 13.9](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45948) and later, you can create a new epic from an empty roadmap.
Prerequisites:
- You must have at least the Reporter role for the epic's group.
@ -175,6 +171,9 @@ To do so, either:
- In the upper-right corner, select **epic actions** (**{ellipsis_v}**) and then **Reopen epic**
- Use the `/reopen` [quick action](../../project/quick_actions.md).
You can also create an epic by
[promoting an issue](../../project/issues/managing_issues.md#promote-an-issue-to-an-epic).
## Go to an epic from an issue
If an issue belongs to an epic, you can go to the parent epic with the
@ -453,40 +452,6 @@ To move an issue to another epic:
1. Go to the **Child issues and epics** section.
1. Drag issues into the desired parent epic in the visible hierarchy.
### Promote an issue to an epic
> [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/37081) from GitLab Ultimate to GitLab Premium in 12.8.
Prerequisites:
- The project to which the issue belongs must be in a group.
- You must have at least the Reporter role the project's immediate parent group.
- You must either:
- Have at least the Reporter role for the project.
- Be the author of the issue.
- Be assigned to the issue.
You can promote an issue to an epic with the `/promote`
[quick action](../../project/quick_actions.md#issues-merge-requests-and-epics).
NOTE:
Promoting a confidential issue to an epic makes all information
related to the issue public as epics are public to group members.
When an issue is promoted to an epic:
- If the issue was confidential, an additional warning is displayed first.
- An epic is created in the same group as the project of the issue.
- Subscribers of the issue are notified that the epic was created.
The following issue metadata is copied to the epic:
- Title, description, activity/comment thread.
- Upvotes and downvotes.
- Participants.
- Group labels that the issue already has.
- Parent epic.
### Use an epic template for repeating issues
You can create a spreadsheet template to manage a pattern of consistently repeating issues.

View File

@ -367,12 +367,35 @@ Alternatively:
## Promote an issue to an epic **(PREMIUM ALL)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3777) in GitLab 11.6.
> - Moved from GitLab Ultimate to GitLab Premium in 12.8.
> - Promoting issues to epics via the UI [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/233974) in GitLab 13.6.
You can promote an issue to an [epic](../../group/epics/index.md) in the immediate parent group.
NOTE:
Promoting a confidential issue to an epic makes all information
related to the issue public, as epics are public to group members.
When an issue is promoted to an epic:
- If the issue was confidential, an additional warning is displayed first.
- An epic is created in the same group as the project of the issue.
- Subscribers of the issue are notified that the epic was created.
The following issue metadata is copied to the epic:
- Title, description, activity, and comment threads.
- Upvotes and downvotes.
- Participants.
- Group labels that the issue had.
- Parent epic.
Prerequisites:
- The project to which the issue belongs must be in a group.
- You must have at least the Reporter role for the project's immediate parent group.
- You must either:
- Have at least the Reporter role for the project.
- Be the author of the issue.
- Be assigned to the issue.
To promote an issue to an epic:
1. On the left sidebar, select **Search or go to** and find your project.
@ -382,8 +405,6 @@ To promote an issue to an epic:
Alternatively, you can use the `/promote` [quick action](../quick_actions.md#issues-merge-requests-and-epics).
Read more about [promoting an issues to epics](../../group/epics/manage_epics.md#promote-an-issue-to-an-epic).
## Promote an issue to an incident
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/296787) in GitLab 14.5.

View File

@ -4,7 +4,7 @@ group: IDE
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Tutorial: Connect a remote machine to the Web IDE **(FREE ALL BETA)**
# Tutorial: Connect a remote machine to the Web IDE **(FREE ALL)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95169) in GitLab 15.4 [with a flag](../../../administration/feature_flags.md) named `vscode_web_ide`. Disabled by default.
> - [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/371084) in GitLab 15.7.

View File

@ -4,7 +4,7 @@ group: IDE
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Remote development **(FREE ALL BETA)**
# Remote development **(FREE ALL)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95169) in GitLab 15.4 [with a flag](../../../administration/feature_flags.md) named `vscode_web_ide`. Disabled by default.
> - [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/371084) in GitLab 15.7.

View File

@ -0,0 +1,86 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Backfills owasp_top_10 column for vulnerability_reads table.
class BackfillOwaspTopTenOfVulnerabilityReads < BatchedMigrationJob
operation_name :set_owasp_top_10
feature_category :vulnerability_management
OWASP_TOP_10 = {
"A1:2017-Injection" => 1,
"A1:2017" => 1,
"A2:2017-Broken Authentication" => 2,
"A2:2017" => 2,
"A3:2017-Sensitive Data Exposure" => 3,
"A3:2017" => 3,
"A4:2017-XML External Entities (XXE)" => 4,
"A4:2017" => 4,
"A5:2017-Broken Access Control" => 5,
"A5:2017" => 5,
"A6:2017-Security Misconfiguration" => 6,
"A6:2017" => 6,
"A7:2017-Cross-Site Scripting (XSS)" => 7,
"A7:2017" => 7,
"A8:2017-Insecure Deserialization" => 8,
"A8:2017" => 8,
"A9:2017-Using Components with Known Vulnerabilities" => 9,
"A9:2017" => 9,
"A10:2017-Insufficient Logging & Monitoring" => 10,
"A10:2017" => 10,
"A1:2021-Broken Access Control" => 11,
"A1:2021" => 11,
"A2:2021-Cryptographic Failures" => 12,
"A2:2021" => 12,
"A3:2021-Injection" => 13,
"A3:2021" => 13,
"A4:2021-Insecure Design" => 14,
"A4:2021" => 14,
"A5:2021-Security Misconfiguration" => 15,
"A5:2021" => 15,
"A6:2021-Vulnerable and Outdated Components" => 16,
"A6:2021" => 16,
"A7:2021-Identification and Authentication Failures" => 17,
"A7:2021" => 17,
"A8:2021-Software and Data Integrity Failures" => 18,
"A8:2021" => 18,
"A9:2021-Security Logging and Monitoring Failures" => 19,
"A9:2021" => 19,
"A10:2021-Server-Side Request Forgery" => 20,
"A10:2021" => 20
}.with_indifferent_access.freeze
UPDATE_SQL = <<-SQL.squish
UPDATE vulnerability_reads AS vr
SET owasp_top_10 =
CASE selected_ids.external_id
#{OWASP_TOP_10.map { |external_id, value| "WHEN '#{external_id}' THEN #{value}" }.join(' ')}
ELSE vr.owasp_top_10
END
FROM (
SELECT vr.id, vi.external_id
FROM vulnerability_reads vr
INNER JOIN vulnerability_occurrences vo ON vr.vulnerability_id = vo.vulnerability_id
INNER JOIN vulnerability_occurrence_identifiers voi ON vo.id = voi.occurrence_id
INNER JOIN vulnerability_identifiers vi ON voi.identifier_id = vi.id
WHERE LOWER(vi.external_type) = 'owasp'
AND vi.external_id IN (?)
AND vr.id IN (?)
) AS selected_ids
WHERE vr.id = selected_ids.id
SQL
class VulnerabilitiesRead < ::ApplicationRecord
self.table_name = 'vulnerability_reads'
end
def perform
each_sub_batch do |sub_batch|
update_query = VulnerabilitiesRead.sanitize_sql([UPDATE_SQL, OWASP_TOP_10.keys, sub_batch.select(:id)])
connection.execute(update_query)
end
end
end
end
end

View File

@ -23,7 +23,8 @@ module Gitlab
current_user_todos: 'Current user todos',
award_emoji: 'Award emoji',
linked_items: 'Linked items',
color: 'Color'
color: 'Color',
rolledup_dates: 'Rolledup dates'
}.freeze
WIDGETS_FOR_TYPE = {
@ -128,7 +129,8 @@ module Gitlab
:current_user_todos,
:award_emoji,
:linked_items,
:color
:color,
:rolledup_dates
],
ticket: [
:assignees,

View File

@ -26,13 +26,6 @@ module Gitlab
}.freeze
DEFAULT_READ_TOTAL_TIMEOUT = 30.seconds
SILENT_MODE_ALLOWED_METHODS = [
Net::HTTP::Get,
Net::HTTP::Head,
Net::HTTP::Options,
Net::HTTP::Trace
].freeze
# We are explicitly assigning these constants because they are used in the codebase.
Error = HTTParty::Error
Response = HTTParty::Response
@ -42,11 +35,7 @@ module Gitlab
class << self
::Gitlab::HTTP_V2::SUPPORTED_HTTP_METHODS.each do |method|
define_method(method) do |path, options = {}, &block|
if ::Feature.enabled?(:use_gitlab_http_v2, Feature.current_request)
::Gitlab::HTTP_V2.public_send(method, path, http_v2_options(options), &block) # rubocop:disable GitlabSecurity/PublicSend
else
::Gitlab::LegacyHTTP.public_send(method, path, options, &block) # rubocop:disable GitlabSecurity/PublicSend
end
::Gitlab::HTTP_V2.public_send(method, path, http_v2_options(options), &block) # rubocop:disable GitlabSecurity/PublicSend -- method is validated to make sure it is one of the methods in Gitlab::HTTP_V2::SUPPORTED_HTTP_METHODS
end
end
@ -59,18 +48,14 @@ module Gitlab
# TODO: This method is subject to be removed
# We have this for now because we explicitly use the `perform_request` method in some places.
def perform_request(http_method, path, options, &block)
if ::Feature.enabled?(:use_gitlab_http_v2, Feature.current_request)
method_name = http_method::METHOD.downcase.to_sym
method_name = http_method::METHOD.downcase.to_sym
unless ::Gitlab::HTTP_V2::SUPPORTED_HTTP_METHODS.include?(method_name)
raise ArgumentError, "Unsupported HTTP method: '#{method_name}'."
end
# Use `::Gitlab::HTTP_V2.get/post/...` methods
::Gitlab::HTTP_V2.public_send(method_name, path, http_v2_options(options), &block) # rubocop:disable GitlabSecurity/PublicSend
else
::Gitlab::LegacyHTTP.perform_request(http_method, path, options, &block)
unless ::Gitlab::HTTP_V2::SUPPORTED_HTTP_METHODS.include?(method_name)
raise ArgumentError, "Unsupported HTTP method: '#{method_name}'."
end
# Use `::Gitlab::HTTP_V2.get/post/...` methods
::Gitlab::HTTP_V2.public_send(method_name, path, http_v2_options(options), &block) # rubocop:disable GitlabSecurity/PublicSend -- method is validated to make sure it is one of the methods in Gitlab::HTTP_V2::SUPPORTED_HTTP_METHODS
end
private

View File

@ -1,78 +0,0 @@
# frozen_string_literal: true
#
# IMPORTANT: With the new development of the 'gitlab-http' gem (https://gitlab.com/gitlab-org/gitlab/-/issues/415686),
# no additional change should be implemented in this class. This class will be removed after migrating all
# the usages to the new gem.
#
require_relative 'http_connection_adapter'
module Gitlab
class LegacyHTTP # rubocop:disable Gitlab/NamespacedClass
include HTTParty # rubocop:disable Gitlab/HTTParty
class << self
alias_method :httparty_perform_request, :perform_request
end
connection_adapter ::Gitlab::HTTPConnectionAdapter
def self.perform_request(http_method, path, options, &block)
raise_if_blocked_by_silent_mode(http_method)
log_info = options.delete(:extra_log_info)
options_with_timeouts =
if !options.has_key?(:timeout)
options.with_defaults(Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS)
else
options
end
return httparty_perform_request(http_method, path, options_with_timeouts, &block) if options[:stream_body]
start_time = nil
read_total_timeout = options.fetch(:timeout, Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT)
httparty_perform_request(http_method, path, options_with_timeouts) do |fragment|
start_time ||= ::Gitlab::Metrics::System.monotonic_time
elapsed = ::Gitlab::Metrics::System.monotonic_time - start_time
if elapsed > read_total_timeout
raise Gitlab::HTTP::ReadTotalTimeout, "Request timed out after #{elapsed} seconds"
end
yield fragment if block
end
rescue HTTParty::RedirectionTooDeep
raise Gitlab::HTTP::RedirectionTooDeep
rescue *Gitlab::HTTP::HTTP_ERRORS => e
extra_info = log_info || {}
extra_info = log_info.call(e, path, options) if log_info.respond_to?(:call)
Gitlab::ErrorTracking.log_exception(e, extra_info)
raise e
end
def self.try_get(path, options = {}, &block)
self.get(path, options, &block) # rubocop:disable Style/RedundantSelf
rescue *Gitlab::HTTP::HTTP_ERRORS
nil
end
def self.raise_if_blocked_by_silent_mode(http_method)
return unless blocked_by_silent_mode?(http_method)
::Gitlab::SilentMode.log_info(
message: 'Outbound HTTP request blocked',
outbound_http_request_method: http_method.to_s
)
raise Gitlab::HTTP::SilentModeBlockedError,
'only get, head, options, and trace methods are allowed in silent mode'
end
def self.blocked_by_silent_mode?(http_method)
::Gitlab::SilentMode.enabled? && Gitlab::HTTP::SILENT_MODE_ALLOWED_METHODS.exclude?(http_method)
end
end
end

View File

@ -23148,6 +23148,9 @@ msgstr ""
msgid "Group information"
msgstr ""
msgid "Group invite"
msgstr ""
msgid "Group jobs by"
msgstr ""
@ -29934,7 +29937,7 @@ msgstr ""
msgid "MemberRole|can't be changed"
msgstr ""
msgid "MemberRole|cannot be changed because it is already assigned to a user. Please create a new Member Role instead"
msgid "MemberRole|cannot be changed. Please create a new Member Role instead."
msgstr ""
msgid "MemberRole|cannot be deleted because it is already assigned to a user. Please disassociate the member role from all users before deletion."

View File

@ -98,6 +98,7 @@ RUN set -eux; \
gdk config set gitaly.skip_setup true \
&& gdk config set workhorse.skip_setup true \
&& gdk config set gitlab_shell.skip_setup true \
&& cp .ruby-version ./gitlab/ \
&& cp .tool-versions ./gitlab/ \
&& make redis/redis.conf all \
&& gdk kill

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
factory :work_items_dates_source, class: 'WorkItems::DatesSource' do
work_item
end
end

View File

@ -0,0 +1,225 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillOwaspTopTenOfVulnerabilityReads,
feature_category: :vulnerability_management do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
let(:scanners) { table(:vulnerability_scanners) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:vulnerability_reads) { table(:vulnerability_reads) }
let(:vulnerability_findings) { table(:vulnerability_occurrences) }
let(:vulnerability_occurrence_identifiers) { table(:vulnerability_occurrence_identifiers) }
let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
shared_context 'with vulnerability data' do
let(:external_id) { '' }
let(:external_type) { '' }
let(:identifier_name) { '' }
let(:vulnerability_1) { create_vulnerability(title: 'vulnerability 1') }
let(:vulnerability_2) { create_vulnerability(title: 'vulnerability 2') }
let(:vulnerability_3) { create_vulnerability(title: 'vulnerability 3') }
let(:vuln_identifier) do
create_identifier(external_id: external_id, external_type: external_type, name: identifier_name)
end
let(:vuln_finding) do
create_finding(vulnerability_id: vulnerability_1.id, primary_identifier_id: vuln_identifier.id)
end
let!(:vulnerability_read_1) { create_vulnerability_read(vulnerability_id: vulnerability_1.id) }
let!(:vulnerability_read_2) { create_vulnerability_read(vulnerability_id: vulnerability_2.id) }
let!(:vulnerability_read_3) { create_vulnerability_read(vulnerability_id: vulnerability_3.id) }
before do
create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding.id, identifier_id: vuln_identifier.id)
end
end
describe '#perform' do
subject(:perform_migration) do
described_class.new(
start_id: vulnerability_reads.first.vulnerability_id,
end_id: vulnerability_reads.last.vulnerability_id,
batch_table: :vulnerability_reads,
batch_column: :vulnerability_id,
sub_batch_size: vulnerability_reads.count,
pause_ms: 0,
connection: ActiveRecord::Base.connection
).perform
end
context 'with owasp top 10 data' do
include_context 'with vulnerability data' do
let(:external_id) { 'A1:2017-Injection' }
let(:external_type) { 'owasp' }
let(:identifier_name) { 'Injection' }
end
it 'updates vulnerability_reads' do
expect { perform_migration }.to change { vulnerability_read_1.reload.owasp_top_10 }
.from(nil).to(1)
.and not_change { vulnerability_read_2.reload.owasp_top_10 }.from(nil)
end
it 'updates vulnerability_reads with correct mapping' do
vuln_identifier_2 = create_identifier(external_id: 'A1:2021', external_type: 'owasp', name: 'A1 2021')
vuln_identifier_3 = create_identifier
vuln_finding_2 = create_finding(vulnerability_id: vulnerability_2.id,
primary_identifier_id: vuln_identifier_2.id)
vuln_finding_3 = create_finding(vulnerability_id: vulnerability_3.id,
primary_identifier_id: vuln_identifier_3.id)
create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding_2.id,
identifier_id: vuln_identifier_2.id)
create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding_3.id,
identifier_id: vuln_identifier_3.id)
perform_migration
expect(vulnerability_read_1.reload.owasp_top_10).to eq(1)
expect(vulnerability_read_2.reload.owasp_top_10).to eq(11)
expect(vulnerability_read_3.reload.owasp_top_10).to be_nil
end
end
context 'with incorrect owasp top 10 data' do
include_context 'with vulnerability data'
shared_examples 'does not update vulnerability_reads' do
it do
perform_migration
expect(vulnerability_read_1.reload.owasp_top_10).to be_nil
expect(vulnerability_read_2.reload.owasp_top_10).to be_nil
expect(vulnerability_read_3.reload.owasp_top_10).to be_nil
end
end
context 'with incorrect long format external_id' do
let(:external_id) { 'A1:2015-Injection' }
let(:external_type) { 'owasp' }
let(:identifier_name) { 'Injection' }
it_behaves_like 'does not update vulnerability_reads'
end
context 'with incorrect short format external_id' do
let(:external_id) { 'A1' }
let(:external_type) { 'owasp' }
let(:identifier_name) { 'Injection' }
it_behaves_like 'does not update vulnerability_reads'
end
context 'with incorrect external_type' do
let(:external_id) { 'A1:2017' }
let(:external_type) { 'owasp2017' }
let(:identifier_name) { 'Injection' }
it_behaves_like 'does not update vulnerability_reads'
end
end
context 'with no vulnerability identifiers match' do
include_context 'with vulnerability data' do
let(:external_id) { 'CVE-2018-1234' }
let(:external_type) { 'CVE' }
let(:identifier_name) { 'CVE-2018-1234' }
end
it 'does not update vulnerability_reads' do
perform_migration
expect(vulnerability_reads.where.not(owasp_top_10: nil).count).to eq(0)
end
end
end
private
def create_vulnerability(overrides = {})
attrs = {
project_id: project.id,
author_id: user.id,
title: 'test',
severity: 1,
confidence: 1,
report_type: 1
}.merge(overrides)
vulnerabilities.create!(attrs)
end
def create_vulnerability_read(overrides = {})
attrs = {
project_id: project.id,
vulnerability_id: 1,
scanner_id: scanner.id,
severity: 1,
report_type: 1,
state: 1,
uuid: SecureRandom.uuid
}.merge(overrides)
vulnerability_reads.create!(attrs)
end
def create_finding(overrides = {})
attrs = {
project_id: project.id,
scanner_id: scanner.id,
severity: 5, # medium
confidence: 2, # unknown,
report_type: 99, # generic
primary_identifier_id: create_identifier.id,
project_fingerprint: SecureRandom.hex(20),
location_fingerprint: SecureRandom.hex(20),
uuid: SecureRandom.uuid,
name: "CVE-2018-1234",
raw_metadata: "{}",
metadata_version: "test:1.0"
}.merge(overrides)
vulnerability_findings.create!(attrs)
end
def create_identifier(overrides = {})
attrs = {
project_id: project.id,
external_id: "CVE-2018-1234",
external_type: "CVE",
name: "CVE-2018-1234",
fingerprint: SecureRandom.hex(20)
}.merge(overrides)
vulnerability_identifiers.create!(attrs)
end
def create_vulnerability_occurrence_identifier(overrides = {})
time = Time.now.utc
attrs = {
created_at: time,
updated_at: time,
occurrence_id: nil,
identifier_id: nil
}.merge(overrides)
vulnerability_occurrence_identifiers.create!(attrs)
end
def checksum(value)
sha = Digest::SHA256.hexdigest(value)
Gitlab::Database::ShaAttribute.new.serialize(sha)
end
end

View File

@ -104,40 +104,4 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
end
end
end
context 'when the FF use_gitlab_http_v2 is disabled' do
before do
stub_feature_flags(use_gitlab_http_v2: false)
end
describe '.get' do
it 'calls Gitlab::LegacyHTTP.get with default options' do
expect(Gitlab::LegacyHTTP).to receive(:get).with('/path', {})
described_class.get('/path')
end
end
describe '.try_get' do
it 'calls .get' do
expect(described_class).to receive(:get).with('/path', {})
described_class.try_get('/path')
end
it 'returns nil when .get raises an error' do
expect(described_class).to receive(:get).and_raise(SocketError)
expect(described_class.try_get('/path')).to be_nil
end
end
describe '.perform_request' do
it 'calls Gitlab::LegacyHTTP.perform_request with default options' do
expect(Gitlab::LegacyHTTP).to receive(:perform_request).with(Net::HTTP::Get, '/path', {})
described_class.perform_request(Net::HTTP::Get, '/path', {})
end
end
end
end

View File

@ -1,448 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::LegacyHTTP, feature_category: :shared do
include StubRequests
let(:default_options) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS }
context 'when allow_local_requests' do
it 'sends the request to the correct URI' do
stub_full_request('https://example.org:8080', ip_address: '8.8.8.8').to_return(status: 200)
described_class.get('https://example.org:8080', allow_local_requests: false)
expect(WebMock).to have_requested(:get, 'https://8.8.8.8:8080').once
end
end
context 'when not allow_local_requests' do
it 'sends the request to the correct URI' do
stub_full_request('https://example.org:8080')
described_class.get('https://example.org:8080', allow_local_requests: true)
expect(WebMock).to have_requested(:get, 'https://8.8.8.9:8080').once
end
end
context 'when reading the response is too slow' do
before_all do
# Override Net::HTTP to add a delay between sending each response chunk
mocked_http = Class.new(Net::HTTP) do
def request(*)
super do |response|
response.instance_eval do
def read_body(*)
mock_stream = @body.split(' ')
mock_stream.each do |fragment|
sleep 0.002.seconds
yield fragment if block_given?
end
@body
end
end
yield response if block_given?
response
end
end
end
@original_net_http = Net.send(:remove_const, :HTTP)
@webmock_net_http = WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_get(:@webMockNetHTTP)
Net.send(:const_set, :HTTP, mocked_http)
WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, mocked_http)
# Reload Gitlab::NetHttpAdapter
Gitlab.send(:remove_const, :NetHttpAdapter)
load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
end
before do
stub_const("Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT", 0.001.seconds)
WebMock.stub_request(:post, /.*/).to_return do
{ body: "chunk-1 chunk-2", status: 200 }
end
end
after(:all) do
Net.send(:remove_const, :HTTP)
Net.send(:const_set, :HTTP, @original_net_http)
WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, @webmock_net_http)
# Reload Gitlab::NetHttpAdapter
Gitlab.send(:remove_const, :NetHttpAdapter)
load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
end
let(:options) { {} }
subject(:request_slow_responder) { described_class.post('http://example.org', **options) }
it 'raises an error' do
expect { request_slow_responder }.to raise_error(
Gitlab::HTTP::ReadTotalTimeout, /Request timed out after ?([0-9]*[.])?[0-9]+ seconds/)
end
context 'and timeout option is greater than DEFAULT_READ_TOTAL_TIMEOUT' do
let(:options) { { timeout: 10.seconds } }
it 'does not raise an error' do
expect { request_slow_responder }.not_to raise_error
end
end
context 'and stream_body option is truthy' do
let(:options) { { stream_body: true } }
it 'does not raise an error' do
expect { request_slow_responder }.not_to raise_error
end
end
end
it 'calls a block' do
WebMock.stub_request(:post, /.*/)
expect { |b| described_class.post('http://example.org', &b) }.to yield_with_args
end
describe 'allow_local_requests_from_web_hooks_and_services is' do
before do
WebMock.stub_request(:get, /.*/).to_return(status: 200, body: 'Success')
end
context 'disabled' do
before do
allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
end
it 'deny requests to localhost' do
expect { described_class.get('http://localhost:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
end
it 'deny requests to private network' do
expect { described_class.get('http://192.168.1.2:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
end
context 'if allow_local_requests set to true' do
it 'override the global value and allow requests to localhost or private network' do
stub_full_request('http://localhost:3003')
expect { described_class.get('http://localhost:3003', allow_local_requests: true) }.not_to raise_error
end
end
end
context 'enabled' do
before do
allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
end
it 'allow requests to localhost' do
stub_full_request('http://localhost:3003')
expect { described_class.get('http://localhost:3003') }.not_to raise_error
end
it 'allow requests to private network' do
expect { described_class.get('http://192.168.1.2:3003') }.not_to raise_error
end
context 'if allow_local_requests set to false' do
it 'override the global value and ban requests to localhost or private network' do
expect { described_class.get('http://localhost:3003', allow_local_requests: false) }.to raise_error(
Gitlab::HTTP::BlockedUrlError)
end
end
end
end
describe 'handle redirect loops' do
before do
stub_full_request("http://example.org", method: :any).to_raise(
HTTParty::RedirectionTooDeep.new("Redirection Too Deep"))
end
it 'handles GET requests' do
expect { described_class.get('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
end
it 'handles POST requests' do
expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
end
it 'handles PUT requests' do
expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
end
it 'handles DELETE requests' do
expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
end
it 'handles HEAD requests' do
expect { described_class.head('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
end
end
describe 'setting default timeouts' do
before do
stub_full_request('http://example.org', method: :any)
end
context 'when no timeouts are set' do
it 'sets default open, read, and write timeouts' do
expect(described_class).to receive(:httparty_perform_request).with(
Net::HTTP::Get, 'http://example.org', default_options
).and_call_original
described_class.get('http://example.org')
end
end
context 'when :timeout is set' do
it 'does not set any default timeouts' do
expect(described_class).to receive(:httparty_perform_request).with(
Net::HTTP::Get, 'http://example.org', { timeout: 1 }
).and_call_original
described_class.get('http://example.org', { timeout: 1 })
end
end
context 'when :open_timeout is set' do
it 'only sets default read and write timeout' do
expect(described_class).to receive(:httparty_perform_request).with(
Net::HTTP::Get, 'http://example.org', default_options.merge(open_timeout: 1)
).and_call_original
described_class.get('http://example.org', open_timeout: 1)
end
end
context 'when :read_timeout is set' do
it 'only sets default open and write timeout' do
expect(described_class).to receive(:httparty_perform_request).with(
Net::HTTP::Get, 'http://example.org', default_options.merge(read_timeout: 1)
).and_call_original
described_class.get('http://example.org', read_timeout: 1)
end
end
context 'when :write_timeout is set' do
it 'only sets default open and read timeout' do
expect(described_class).to receive(:httparty_perform_request).with(
Net::HTTP::Put, 'http://example.org', default_options.merge(write_timeout: 1)
).and_call_original
described_class.put('http://example.org', write_timeout: 1)
end
end
end
describe '.try_get' do
let(:path) { 'http://example.org' }
let(:extra_log_info_proc) do
proc do |error, url, options|
{ klass: error.class, url: url, options: options }
end
end
let(:request_options) do
default_options.merge({
verify: false,
basic_auth: { username: 'user', password: 'pass' }
})
end
Gitlab::HTTP::HTTP_ERRORS.each do |exception_class|
context "with #{exception_class}" do
let(:klass) { exception_class }
context 'with path' do
before do
expect(described_class).to receive(:httparty_perform_request)
.with(Net::HTTP::Get, path, default_options)
.and_raise(klass)
end
it 'handles requests without extra_log_info' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), {})
expect(described_class.try_get(path)).to be_nil
end
it 'handles requests with extra_log_info as hash' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), { a: :b })
expect(described_class.try_get(path, extra_log_info: { a: :b })).to be_nil
end
it 'handles requests with extra_log_info as proc' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), { url: path, klass: klass, options: {} })
expect(described_class.try_get(path, extra_log_info: extra_log_info_proc)).to be_nil
end
end
context 'with path and options' do
before do
expect(described_class).to receive(:httparty_perform_request)
.with(Net::HTTP::Get, path, request_options)
.and_raise(klass)
end
it 'handles requests without extra_log_info' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), {})
expect(described_class.try_get(path, request_options)).to be_nil
end
it 'handles requests with extra_log_info as hash' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), { a: :b })
expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b })).to be_nil
end
it 'handles requests with extra_log_info as proc' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), { klass: klass, url: path, options: request_options })
expect(described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc)).to be_nil
end
end
context 'with path, options, and block' do
let(:block) do
proc {}
end
before do
expect(described_class).to receive(:httparty_perform_request)
.with(Net::HTTP::Get, path, request_options, &block)
.and_raise(klass)
end
it 'handles requests without extra_log_info' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), {})
expect(described_class.try_get(path, request_options, &block)).to be_nil
end
it 'handles requests with extra_log_info as hash' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), { a: :b })
expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b }, &block)).to be_nil
end
it 'handles requests with extra_log_info as proc' do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(klass), { klass: klass, url: path, options: request_options })
expect(
described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc, &block)
).to be_nil
end
end
end
end
end
describe 'silent mode', feature_category: :geo_replication do
before do
stub_full_request("http://example.org", method: :any)
stub_application_setting(silent_mode_enabled: silent_mode)
end
context 'when silent mode is enabled' do
let(:silent_mode) { true }
it 'allows GET requests' do
expect { described_class.get('http://example.org') }.not_to raise_error
end
it 'allows HEAD requests' do
expect { described_class.head('http://example.org') }.not_to raise_error
end
it 'allows OPTIONS requests' do
expect { described_class.options('http://example.org') }.not_to raise_error
end
it 'blocks POST requests' do
expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
end
it 'blocks PUT requests' do
expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
end
it 'blocks DELETE requests' do
expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
end
it 'logs blocked requests' do
expect(::Gitlab::AppJsonLogger).to receive(:info).with(
message: "Outbound HTTP request blocked",
outbound_http_request_method: 'Net::HTTP::Post',
silent_mode_enabled: true
)
expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
end
end
context 'when silent mode is disabled' do
let(:silent_mode) { false }
it 'allows GET requests' do
expect { described_class.get('http://example.org') }.not_to raise_error
end
it 'allows HEAD requests' do
expect { described_class.head('http://example.org') }.not_to raise_error
end
it 'allows OPTIONS requests' do
expect { described_class.options('http://example.org') }.not_to raise_error
end
it 'allows POST requests' do
expect { described_class.post('http://example.org') }.not_to raise_error
end
it 'allows PUT requests' do
expect { described_class.put('http://example.org') }.not_to raise_error
end
it 'allows DELETE requests' do
expect { described_class.delete('http://example.org') }.not_to raise_error
end
end
end
end

View File

@ -4,7 +4,5 @@ require 'spec_helper'
require_migration!
RSpec.describe AddLinkedItemsWorkItemWidget, :migration, feature_category: :portfolio_management do
it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Linked items' do
let(:work_item_type_count) { 8 }
end
it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Linked items'
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe AddWorkItemsRolledupDatesWidget, :migration, feature_category: :team_planning do
it_behaves_like 'migration that adds widget to work items definitions',
widget_name: 'Rolledup dates'
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillOwaspTopTenOfVulnerabilityReads, feature_category: :vulnerability_management do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :vulnerability_reads,
column_name: :vulnerability_id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
}
end
end
end

View File

@ -102,65 +102,18 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value(nil).for(:protected_paths_for_get_request) }
it { is_expected.to allow_value([]).for(:protected_paths_for_get_request) }
it { is_expected.to allow_value(3).for(:push_event_hooks_limit) }
it { is_expected.not_to allow_value('three').for(:push_event_hooks_limit) }
it { is_expected.not_to allow_value(nil).for(:push_event_hooks_limit) }
it { is_expected.to allow_value(3).for(:push_event_activities_limit) }
it { is_expected.not_to allow_value('three').for(:push_event_activities_limit) }
it { is_expected.not_to allow_value(nil).for(:push_event_activities_limit) }
it { is_expected.to validate_numericality_of(:container_registry_delete_tags_service_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_cleanup_tags_service_max_list_size).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_data_repair_detail_worker_max_concurrency).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_expiration_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_inclusion_of(:container_registry_expiration_policies_caching).in_array([true, false]) }
it { is_expected.to validate_numericality_of(:container_registry_import_max_tags_count).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_max_retries).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_start_max_retries).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_max_step_duration).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_pre_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_pre_import_tags_rate).is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:container_registry_data_repair_detail_worker_max_concurrency) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_tags_count) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_start_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_step_duration) }
it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_timeout) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_timeout) }
it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_tags_rate) }
it { is_expected.to allow_value(1.5).for(:container_registry_pre_import_tags_rate) }
it { is_expected.to validate_presence_of(:container_registry_import_target_plan) }
it { is_expected.to validate_presence_of(:container_registry_import_created_before) }
it { is_expected.to validate_numericality_of(:decompress_archive_file_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:decompress_archive_file_timeout) }
it { is_expected.to validate_numericality_of(:dependency_proxy_ttl_group_policy_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:dependency_proxy_ttl_group_policy_worker_capacity) }
it { is_expected.to validate_numericality_of(:packages_cleanup_package_file_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:packages_cleanup_package_file_worker_capacity) }
it { is_expected.to validate_numericality_of(:package_registry_cleanup_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:package_registry_cleanup_policies_worker_capacity) }
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
it { is_expected.to validate_inclusion_of(:wiki_asciidoc_allow_uri_includes).in_array([true, false]) }
it { is_expected.to validate_presence_of(:max_artifacts_size) }
it { is_expected.to validate_numericality_of(:max_artifacts_size).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_yaml_size_bytes) }
it { is_expected.to validate_numericality_of(:max_yaml_size_bytes).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_yaml_depth) }
it { is_expected.to validate_numericality_of(:max_yaml_depth).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_pages_size) }
it { is_expected.to validate_presence_of(:max_pages_custom_domains_per_project) }
it { is_expected.to validate_presence_of(:max_terraform_state_size_bytes) }
it { is_expected.to validate_numericality_of(:max_terraform_state_size_bytes).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_inclusion_of(:user_defaults_to_private_profile).in_array([true, false]) }
@ -175,40 +128,12 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
.is_less_than(::Gitlab::Pages::MAX_SIZE / 1.megabyte)
end
it 'ensures max_pages_custom_domains_per_project is an integer greater than 0 (or equal to 0 to indicate unlimited/maximum)' do
is_expected
.to validate_numericality_of(:max_pages_custom_domains_per_project)
.only_integer
.is_greater_than_or_equal_to(0)
end
it { is_expected.to validate_presence_of(:jobs_per_stage_page_size) }
it { is_expected.to validate_numericality_of(:jobs_per_stage_page_size).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(7).for(:minimum_password_length) }
it { is_expected.not_to allow_value(129).for(:minimum_password_length) }
it { is_expected.not_to allow_value(nil).for(:minimum_password_length) }
it { is_expected.not_to allow_value('abc').for(:minimum_password_length) }
it { is_expected.to allow_value(10).for(:minimum_password_length) }
it { is_expected.to allow_value(300).for(:issues_create_limit) }
it { is_expected.not_to allow_value('three').for(:issues_create_limit) }
it { is_expected.not_to allow_value(nil).for(:issues_create_limit) }
it { is_expected.not_to allow_value(10.5).for(:issues_create_limit) }
it { is_expected.not_to allow_value(-1).for(:issues_create_limit) }
it { is_expected.to allow_value(0).for(:raw_blob_request_limit) }
it { is_expected.not_to allow_value('abc').for(:raw_blob_request_limit) }
it { is_expected.not_to allow_value(nil).for(:raw_blob_request_limit) }
it { is_expected.not_to allow_value(10.5).for(:raw_blob_request_limit) }
it { is_expected.not_to allow_value(-1).for(:raw_blob_request_limit) }
it { is_expected.to allow_value(0).for(:pipeline_limit_per_project_user_sha) }
it { is_expected.not_to allow_value('abc').for(:pipeline_limit_per_project_user_sha) }
it { is_expected.not_to allow_value(nil).for(:pipeline_limit_per_project_user_sha) }
it { is_expected.not_to allow_value(10.5).for(:pipeline_limit_per_project_user_sha) }
it { is_expected.not_to allow_value(-1).for(:pipeline_limit_per_project_user_sha) }
it { is_expected.not_to allow_value(false).for(:hashed_storage_enabled) }
it { is_expected.to allow_value('default' => 0).for(:repository_storages_weighted) }
@ -220,15 +145,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value('default' => 101).for(:repository_storages_weighted).with_message("value for 'default' must be between 0 and 100") }
it { is_expected.not_to allow_value('default' => 100, shouldntexist: 50).for(:repository_storages_weighted).with_message("can't include: shouldntexist") }
%i[notes_create_limit search_rate_limit search_rate_limit_unauthenticated users_get_by_id_limit
projects_api_rate_limit_unauthenticated gitlab_shell_operation_limit].each do |setting|
it { is_expected.to allow_value(400).for(setting) }
it { is_expected.not_to allow_value('two').for(setting) }
it { is_expected.not_to allow_value(nil).for(setting) }
it { is_expected.not_to allow_value(5.5).for(setting) }
it { is_expected.not_to allow_value(-2).for(setting) }
end
def many_usernames(num = 100)
Array.new(num) { |i| "username#{i}" }
end
@ -281,24 +197,132 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_inclusion_of(:silent_mode_enabled).in_array([true, false]) }
it { is_expected.to allow_value(0).for(:ci_max_includes) }
it { is_expected.to allow_value(200).for(:ci_max_includes) }
it { is_expected.not_to allow_value('abc').for(:ci_max_includes) }
it { is_expected.not_to allow_value(nil).for(:ci_max_includes) }
it { is_expected.not_to allow_value(10.5).for(:ci_max_includes) }
it { is_expected.not_to allow_value(-1).for(:ci_max_includes) }
context 'for non-null integer attributes starting from 0' do
where(:attribute) do
%i[
bulk_import_max_download_file_size
ci_max_includes
ci_max_total_yaml_size_bytes
container_registry_cleanup_tags_service_max_list_size
container_registry_data_repair_detail_worker_max_concurrency
container_registry_delete_tags_service_timeout
container_registry_expiration_policies_worker_capacity
container_registry_import_max_retries
container_registry_import_max_step_duration
container_registry_import_max_tags_count
container_registry_import_start_max_retries
container_registry_import_timeout
container_registry_pre_import_timeout
decompress_archive_file_timeout
dependency_proxy_ttl_group_policy_worker_capacity
gitlab_shell_operation_limit
inactive_projects_min_size_mb
issues_create_limit
jobs_per_stage_page_size
max_decompressed_archive_size
max_export_size
max_import_remote_file_size
max_import_size
max_pages_custom_domains_per_project
max_terraform_state_size_bytes
package_registry_cleanup_policies_worker_capacity
packages_cleanup_package_file_worker_capacity
pipeline_limit_per_project_user_sha
projects_api_rate_limit_unauthenticated
raw_blob_request_limit
search_rate_limit
search_rate_limit_unauthenticated
session_expire_delay
sidekiq_job_limiter_compression_threshold_bytes
sidekiq_job_limiter_limit_bytes
terminal_max_session_time
users_get_by_id_limit
notes_create_limit
]
end
it { is_expected.to allow_value(0).for(:ci_max_total_yaml_size_bytes) }
it { is_expected.to allow_value(200).for(:ci_max_total_yaml_size_bytes) }
it { is_expected.not_to allow_value('abc').for(:ci_max_total_yaml_size_bytes) }
it { is_expected.not_to allow_value(nil).for(:ci_max_total_yaml_size_bytes) }
it { is_expected.not_to allow_value(10.5).for(:ci_max_total_yaml_size_bytes) }
it { is_expected.not_to allow_value(-1).for(:ci_max_total_yaml_size_bytes) }
with_them do
it { is_expected.to validate_numericality_of(attribute).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(attribute) }
end
end
context 'for non-null numerical attributes starting from 0' do
where(:attribute) do
%i[
push_event_hooks_limit
push_event_activities_limit
]
end
with_them do
it { is_expected.to validate_numericality_of(attribute).is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(attribute) }
end
end
context 'for non-null integer attributes starting from 1' do
where(:attribute) do
%i[
max_attachment_size
max_artifacts_size
container_registry_token_expire_delay
housekeeping_optimize_repository_period
bulk_import_concurrent_pipeline_batch_limit
snippet_size_limit
max_yaml_size_bytes
max_yaml_depth
namespace_aggregation_schedule_lease_duration_in_seconds
throttle_unauthenticated_api_requests_per_period
throttle_unauthenticated_api_period_in_seconds
throttle_unauthenticated_requests_per_period
throttle_unauthenticated_period_in_seconds
throttle_unauthenticated_packages_api_requests_per_period
throttle_unauthenticated_packages_api_period_in_seconds
throttle_unauthenticated_files_api_requests_per_period
throttle_unauthenticated_files_api_period_in_seconds
throttle_unauthenticated_deprecated_api_requests_per_period
throttle_unauthenticated_deprecated_api_period_in_seconds
throttle_authenticated_api_requests_per_period
throttle_authenticated_api_period_in_seconds
throttle_authenticated_git_lfs_requests_per_period
throttle_authenticated_git_lfs_period_in_seconds
throttle_authenticated_web_requests_per_period
throttle_authenticated_web_period_in_seconds
throttle_authenticated_packages_api_requests_per_period
throttle_authenticated_packages_api_period_in_seconds
throttle_authenticated_files_api_requests_per_period
throttle_authenticated_files_api_period_in_seconds
throttle_authenticated_deprecated_api_requests_per_period
throttle_authenticated_deprecated_api_period_in_seconds
throttle_protected_paths_requests_per_period
throttle_protected_paths_period_in_seconds
project_jobs_api_rate_limit
]
end
with_them do
it { is_expected.to validate_numericality_of(attribute).only_integer.is_greater_than(0) }
it { is_expected.not_to allow_value(nil).for(attribute) }
end
end
context 'for null integer attributes starting from 1' do
where(:attribute) do
%i[
failed_login_attempts_unlock_period_in_minutes
external_pipeline_validation_service_timeout
max_login_attempts
]
end
with_them do
it { is_expected.to validate_numericality_of(attribute).only_integer.is_greater_than(0).allow_nil }
end
end
it { is_expected.to validate_inclusion_of(:remember_me_enabled).in_array([true, false]) }
it { is_expected.to validate_numericality_of(:namespace_aggregation_schedule_lease_duration_in_seconds).only_integer.is_greater_than(0) }
it { is_expected.to validate_inclusion_of(:instance_level_code_suggestions_enabled).in_array([true, false]) }
it { is_expected.to validate_inclusion_of(:package_registry_allow_anyone_to_pull_option).in_array([true, false]) }
@ -587,66 +611,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
it { is_expected.to validate_presence_of(:max_attachment_size) }
specify do
is_expected.to validate_numericality_of(:max_attachment_size)
.only_integer
.is_greater_than(0)
end
it { is_expected.to validate_presence_of(:max_export_size) }
specify do
is_expected.to validate_numericality_of(:max_export_size)
.only_integer
.is_greater_than_or_equal_to(0)
end
it { is_expected.to validate_presence_of(:max_import_size) }
specify do
is_expected.to validate_numericality_of(:max_import_size)
.only_integer
.is_greater_than_or_equal_to(0)
end
it { is_expected.to validate_presence_of(:max_import_remote_file_size) }
specify do
is_expected.to validate_numericality_of(:max_import_remote_file_size)
.only_integer
.is_greater_than_or_equal_to(0)
end
it { is_expected.to validate_presence_of(:bulk_import_max_download_file_size) }
specify do
is_expected.to validate_numericality_of(:bulk_import_max_download_file_size)
.only_integer
.is_greater_than_or_equal_to(0)
end
it { is_expected.to validate_presence_of(:max_decompressed_archive_size) }
specify do
is_expected.to validate_numericality_of(:max_decompressed_archive_size)
.only_integer
.is_greater_than_or_equal_to(0)
end
specify do
is_expected.to validate_numericality_of(:failed_login_attempts_unlock_period_in_minutes)
.only_integer
.is_greater_than(0)
end
specify do
is_expected.to validate_numericality_of(:max_login_attempts)
.only_integer
.is_greater_than(0)
end
specify do
is_expected.to validate_numericality_of(:local_markdown_version)
.only_integer
@ -880,10 +844,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
context 'housekeeping settings' do
it { is_expected.not_to allow_value(0).for(:housekeeping_optimize_repository_period) }
end
context 'gitaly timeouts' do
it "validates that the default_timeout is lower than the max_request_duration" do
is_expected.to validate_numericality_of(:gitaly_timeout_default)
@ -1003,8 +963,8 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it 'the credentials are valid when the private key can be read and matches the certificate' do
tls_attributes = [:external_auth_client_key_pass,
:external_auth_client_key,
:external_auth_client_cert]
:external_auth_client_key,
:external_auth_client_cert]
setting.external_auth_client_key = File.read('spec/fixtures/passphrase_x509_certificate_pk.key')
setting.external_auth_client_key_pass = '5iveL!fe'
@ -1216,43 +1176,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
context 'throttle_* settings' do
where(:throttle_setting) do
%i[
throttle_unauthenticated_api_requests_per_period
throttle_unauthenticated_api_period_in_seconds
throttle_unauthenticated_requests_per_period
throttle_unauthenticated_period_in_seconds
throttle_authenticated_api_requests_per_period
throttle_authenticated_api_period_in_seconds
throttle_authenticated_web_requests_per_period
throttle_authenticated_web_period_in_seconds
throttle_unauthenticated_packages_api_requests_per_period
throttle_unauthenticated_packages_api_period_in_seconds
throttle_authenticated_packages_api_requests_per_period
throttle_authenticated_packages_api_period_in_seconds
throttle_unauthenticated_files_api_requests_per_period
throttle_unauthenticated_files_api_period_in_seconds
throttle_authenticated_files_api_requests_per_period
throttle_authenticated_files_api_period_in_seconds
throttle_unauthenticated_deprecated_api_requests_per_period
throttle_unauthenticated_deprecated_api_period_in_seconds
throttle_authenticated_deprecated_api_requests_per_period
throttle_authenticated_deprecated_api_period_in_seconds
throttle_authenticated_git_lfs_requests_per_period
throttle_authenticated_git_lfs_period_in_seconds
]
end
with_them do
it { is_expected.to allow_value(3).for(throttle_setting) }
it { is_expected.not_to allow_value(-3).for(throttle_setting) }
it { is_expected.not_to allow_value(0).for(throttle_setting) }
it { is_expected.not_to allow_value('three').for(throttle_setting) }
it { is_expected.not_to allow_value(nil).for(throttle_setting) }
end
end
context 'sidekiq job limiter settings' do
it 'has the right defaults', :aggregate_failures do
expect(setting.sidekiq_job_limiter_mode).to eq('compress')
@ -1263,8 +1186,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
it { is_expected.to allow_value('track').for(:sidekiq_job_limiter_mode) }
it { is_expected.to validate_numericality_of(:sidekiq_job_limiter_compression_threshold_bytes).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:sidekiq_job_limiter_limit_bytes).only_integer.is_greater_than_or_equal_to(0) }
end
context 'prometheus settings' do
@ -1353,13 +1274,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
.with_message("must be a value between 0 and 1")
end
end
describe 'bulk_import_concurrent_pipeline_batch_limit' do
it do
is_expected.to validate_numericality_of(:bulk_import_concurrent_pipeline_batch_limit)
.is_greater_than(0)
end
end
end
context 'restrict creating duplicates' do
@ -1715,8 +1629,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_numericality_of(:inactive_projects_delete_after_months).is_greater_than(0) }
it { is_expected.to validate_numericality_of(:inactive_projects_min_size_mb).is_greater_than_or_equal_to(0) }
it "deletes the redis key used for tracking inactive projects deletion warning emails when setting is updated",
:clean_gitlab_redis_shared_state do
Gitlab::Redis::SharedState.with do |redis|

View File

@ -74,22 +74,6 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
apply_project_authorization_changes
end
context 'when feature flag "user_approval_rules_removal" is disabled' do
before do
stub_feature_flags(user_approval_rules_removal: false)
end
it 'does not publish an AuthorizationsRemovedEvent event' do
expect(::Gitlab::EventStore).not_to(
receive(:publish_group).with(
array_including(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
)
)
apply_project_authorization_changes
end
end
end
shared_examples_for 'publishes AuthorizationsAddedEvent' do

View File

@ -27,7 +27,8 @@ RSpec.describe WorkItems::WidgetDefinition, feature_category: :team_planning do
::WorkItems::Widgets::Progress,
::WorkItems::Widgets::RequirementLegacy,
::WorkItems::Widgets::TestReports,
::WorkItems::Widgets::Color
::WorkItems::Widgets::Color,
::WorkItems::Widgets::RolledupDates
]
end

View File

@ -3,7 +3,7 @@
RSpec.shared_examples 'migration that adds widget to work items definitions' do |widget_name:|
let(:migration) { described_class.new }
let(:work_item_definitions) { table(:work_item_widget_definitions) }
let(:work_item_type_count) { 7 }
let(:work_item_type_count) { described_class::WORK_ITEM_TYPES.size }
describe '#up' do
it "creates widget definition in all types" do
@ -14,11 +14,13 @@ RSpec.shared_examples 'migration that adds widget to work items definitions' do
end
it 'logs a warning if the type is missing' do
type_name = described_class::WORK_ITEM_TYPES.first
allow(described_class::WorkItemType).to receive(:find_by_name_and_namespace_id).and_call_original
allow(described_class::WorkItemType).to receive(:find_by_name_and_namespace_id)
.with('Issue', nil).and_return(nil)
.with(type_name, nil).and_return(nil)
expect(Gitlab::AppLogger).to receive(:warn).with('type Issue is missing, not adding widget')
expect(Gitlab::AppLogger).to receive(:warn).with("type #{type_name} is missing, not adding widget")
migrate!
end
end