Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-12-06 03:14:51 +00:00
parent 1e2aa980a7
commit 313ce461ca
69 changed files with 1603 additions and 285 deletions

View File

@ -1098,6 +1098,7 @@ RSpec/BeforeAllRoleAssignment:
- 'spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/commit_notes_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/legacy_references_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb'

View File

@ -1770,6 +1770,7 @@ RSpec/NamedSubject:
- 'spec/lib/bulk_imports/projects/graphql/get_project_query_spec.rb'
- 'spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb'
- 'spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/legacy_references_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/stage_spec.rb'
- 'spec/lib/bulk_imports/source_url_builder_spec.rb'

View File

@ -65,6 +65,8 @@ PATH
remote: gems/gitlab-secret_detection
specs:
gitlab-secret_detection (0.1.0)
re2 (~> 2.4)
toml-rb (~> 2.2)
PATH
remote: gems/gitlab-utils

View File

@ -8,7 +8,7 @@ import AbuseReportCommentForm from './abuse_report_comment_form.vue';
export default {
name: 'AbuseReportAddNote',
i18n: {
reply: __('Reply'),
reply: __('Reply'),
replyToComment: __('Reply to comment'),
commentError: __('Your comment could not be submitted because %{reason}.'),
genericError: __(
@ -139,7 +139,7 @@ export default {
v-else
ref="textarea"
rows="1"
class="reply-placeholder-text-field gl-font-regular!"
class="reply-placeholder-text-field"
data-testid="abuse-report-note-reply-textarea"
:placeholder="$options.i18n.reply"
:aria-label="$options.i18n.replyToComment"

View File

@ -85,7 +85,7 @@ export default {
};
</script>
<template>
<aside class="right-sidebar build-sidebar" data-offset-top="101" data-spy="affix">
<aside class="right-sidebar build-sidebar">
<div class="sidebar-container">
<div class="blocks-container gl-p-4 gl-pt-0">
<sidebar-header

View File

@ -1,7 +1,7 @@
query getCustomEmoji($groupPath: ID!) {
group(fullPath: $groupPath) {
id
customEmoji {
customEmoji(includeAncestorGroups: true) {
nodes {
id
name

View File

@ -18,6 +18,18 @@ export default {
required: false,
default: false,
},
useH1: {
type: Boolean,
default: false,
required: false,
},
},
computed: {
headerClasses() {
return this.useH1
? 'gl-w-full gl-font-size-h-display gl-m-0!'
: 'gl-font-weight-normal gl-sm-font-weight-bold gl-mb-1 gl-mt-0 gl-w-full';
},
},
methods: {
handleBlur({ target }) {
@ -39,9 +51,10 @@ export default {
</script>
<template>
<h2
class="gl-font-weight-normal gl-sm-font-weight-bold gl-mb-1 gl-mt-0 gl-w-full"
:class="{ 'gl-cursor-text': disabled }"
<component
:is="useH1 ? 'h1' : 'h2'"
class="gl-w-full"
:class="[{ 'gl-cursor-text': disabled }, headerClasses]"
aria-labelledby="item-title"
>
<span
@ -64,5 +77,5 @@ export default {
@keydown.meta.b.prevent
>{{ title }}</span
>
</h2>
</component>
</template>

View File

@ -287,9 +287,9 @@ export default {
v-else
ref="textarea"
rows="1"
class="reply-placeholder-text-field gl-font-regular!"
class="reply-placeholder-text-field"
data-testid="note-reply-textarea"
:placeholder="__('Reply')"
:placeholder="__('Reply')"
:aria-label="__('Reply to comment')"
@focus="showReplyForm"
@click="showReplyForm"

View File

@ -36,6 +36,16 @@ export default {
default: WORK_ITEM_NOTES_FILTER_ALL_NOTES,
required: false,
},
useH2: {
type: Boolean,
default: false,
required: false,
},
},
computed: {
headerClasses() {
return this.useH2 ? 'gl-font-size-h1 gl-m-0' : 'gl-font-base gl-m-0';
},
},
methods: {
changeNotesSortOrder(direction) {
@ -58,7 +68,9 @@ export default {
<div
class="gl-display-flex gl-justify-content-space-between gl-flex-wrap gl-pb-3 gl-align-items-center"
>
<h3 class="gl-font-base gl-m-0">{{ $options.i18n.activityLabel }}</h3>
<component :is="useH2 ? 'h2' : 'h3'" :class="headerClasses">{{
$options.i18n.activityLabel
}}</component>
<div class="gl-display-flex gl-gap-3">
<work-item-activity-sort-filter
:work-item-type="workItemType"

View File

@ -492,6 +492,7 @@ export default {
:work-item-type="workItemType"
:work-item-parent-id="workItemParentId"
:can-update="canUpdate"
:use-h1="!isModal"
@error="updateError = $event"
/>
<work-item-created-updated
@ -582,6 +583,7 @@ export default {
:report-abuse-path="reportAbusePath"
:is-work-item-confidential="workItem.confidential"
class="gl-pt-5"
:use-h2="!isModal"
@error="updateError = $event"
@has-notes="updateHasNotes"
@openReportAbuse="openReportAbuseDrawer"

View File

@ -89,6 +89,11 @@ export default {
required: false,
default: false,
},
useH2: {
type: Boolean,
default: false,
required: false,
},
},
data() {
return {
@ -330,6 +335,7 @@ export default {
:disable-activity-filter-sort="disableActivityFilterSort"
:work-item-type="workItemType"
:discussion-filter="discussionFilter"
:use-h2="useH2"
@changeSort="changeNotesSortOrder"
@changeFilter="filterDiscussions"
/>

View File

@ -42,6 +42,11 @@ export default {
required: false,
default: false,
},
useH1: {
type: Boolean,
default: false,
required: false,
},
},
computed: {
tracking() {
@ -101,5 +106,10 @@ export default {
</script>
<template>
<item-title :title="workItemTitle" :disabled="!canUpdate" @title-changed="updateTitle" />
<item-title
:title="workItemTitle"
:disabled="!canUpdate"
:use-h1="useH1"
@title-changed="updateTitle"
/>
</template>

View File

@ -65,10 +65,6 @@
.avatar-container {
margin: 0 auto;
}
li.active:not(.fly-out-top-item) > a {
background-color: $indigo-900-alpha-008;
}
}
@mixin sub-level-items-flyout {

View File

@ -110,9 +110,6 @@ $t-gray-a-24: rgba($gray-950, 0.24) !default;
$white-dark: darken($gray-50, 2) !default;
// To do this variant right for darkmode, we need to create a variable for it.
$indigo-900-alpha-008: rgba($theme-indigo-900, 0.08);
$border-white-light: darken($white, $darken-border-factor) !default;
$border-white-normal: darken($gray-50, $darken-border-factor) !default;

View File

@ -82,6 +82,7 @@
.right-sidebar.build-sidebar {
padding: 0;
top: $calc-application-header-height;
@include media-breakpoint-up(lg) {
@include gl-border-l-0;
@ -92,9 +93,7 @@
}
.sidebar-container {
@include gl-sticky;
top: #{$top-bar-height - 1px};
max-height: calc(100vh - #{$top-bar-height - 1px} - var(--performance-bar-height));
max-height: 100%;
overflow-y: scroll;
overflow-x: hidden;
-webkit-overflow-scrolling: touch;

View File

@ -326,7 +326,6 @@ table {
.discussion-reply-holder {
.reply-placeholder-text-field {
@include gl-font-monospace;
border-radius: $gl-border-radius-base;
width: 100%;
resize: none;

View File

@ -8,19 +8,6 @@ $gray-dark: darken($gray-100, 2);
$gray-darker: darken($gray-200, 2);
$gray-darkest: $gray-700;
// $data-viz blue shades required for $calendar-activity-colors
$data-viz-blue-50: #2a2b59;
$data-viz-blue-100: #303470;
$data-viz-blue-200: #374291;
$data-viz-blue-300: #3f51ae;
$data-viz-blue-400: #4e65cd;
$data-viz-blue-500: #617ae2;
$data-viz-blue-600: #7992f5;
$data-viz-blue-700: #97acff;
$data-viz-blue-800: #b7c6ff;
$data-viz-blue-900: #d2dcff;
$data-viz-blue-950: #e9ebff;
// Some of the other $t-gray-a variables are used
// for borders and some other places, so we cannot override
// them. These are used only for box shadows so we can
@ -30,8 +17,6 @@ $t-gray-a-24: rgba($gray-10, 0.24);
$black-normal: $gray-900;
$white-dark: $gray-100;
$theme-indigo-50: #1a1a40;
$border-color: #4f4f4f;
$border-white-normal: $border-color;

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true

module Mutations
  module ContainerRegistry
    module Protection
      module Rule
        # GraphQL mutation that deletes a single container registry protection
        # rule, identified by its Global ID. Gated behind the
        # `container_registry_protected_containers` feature flag: when the flag
        # is disabled the mutation raises a resource-not-available error.
        class Delete < ::Mutations::BaseMutation
          graphql_name 'DeleteContainerRegistryProtectionRule'

          description 'Deletes a container registry protection rule. ' \
            'Available only when feature flag `container_registry_protected_containers` is enabled.'

          # Authorization is checked against the rule found via `authorized_find!`.
          authorize :admin_container_image

          argument :id,
            ::Types::GlobalIDType[::ContainerRegistry::Protection::Rule],
            required: true,
            description: 'Global ID of the container registry protection rule to delete.'

          field :container_registry_protection_rule,
            Types::ContainerRegistry::Protection::RuleType,
            null: true,
            description: 'Container registry protection rule that was deleted successfully.'

          # Resolves the mutation.
          #
          # @param id [GlobalID] Global ID of the rule to delete.
          # @return [Hash] :container_registry_protection_rule (the deleted rule,
          #   taken from the service payload; nil on failure) and :errors.
          def resolve(id:, **_kwargs)
            if Feature.disabled?(:container_registry_protected_containers)
              raise_resource_not_available_error!("'container_registry_protected_containers' feature flag is disabled")
            end

            # Raises unless current_user is authorized for :admin_container_image.
            container_registry_protection_rule = authorized_find!(id: id)

            response = ::ContainerRegistry::Protection::DeleteRuleService.new(container_registry_protection_rule,
              current_user: current_user).execute

            { container_registry_protection_rule: response.payload[:container_registry_protection_rule],
              errors: response.errors }
          end
        end
      end
    end
  end
end

View File

@ -138,6 +138,7 @@ module Types
mount_mutation Mutations::DesignManagement::Update
mount_mutation Mutations::ContainerExpirationPolicies::Update
mount_mutation Mutations::ContainerRegistry::Protection::Rule::Create, alpha: { milestone: '16.6' }
mount_mutation Mutations::ContainerRegistry::Protection::Rule::Delete, alpha: { milestone: '16.7' }
mount_mutation Mutations::ContainerRepositories::Destroy
mount_mutation Mutations::ContainerRepositories::DestroyTags
mount_mutation Mutations::Ci::Catalog::Resources::Create, alpha: { milestone: '15.11' }

View File

@ -65,7 +65,7 @@ module NotesHelper
content_tag(
:textarea,
rows: 1,
placeholder: _('Reply...'),
placeholder: _('Reply'),
'aria-label': _('Reply to comment'),
class: 'reply-placeholder-text-field js-discussion-reply-button',
data: {

View File

@ -0,0 +1,45 @@
# frozen_string_literal: true

module ContainerRegistry
  module Protection
    # Destroys a single container registry protection rule on behalf of a
    # user, after checking that the user can administer container images on
    # the rule's project. Any exception raised while destroying the record is
    # converted into an error ServiceResponse rather than propagated.
    class DeleteRuleService
      include Gitlab::Allowable

      # @param container_registry_protection_rule [ContainerRegistry::Protection::Rule]
      # @param current_user [User] the user requesting the deletion
      # @raise [ArgumentError] when either argument is blank
      def initialize(container_registry_protection_rule, current_user:)
        if container_registry_protection_rule.blank? || current_user.blank?
          raise ArgumentError, 'container_registry_protection_rule and current_user must be set'
        end

        @container_registry_protection_rule = container_registry_protection_rule
        @current_user = current_user
      end

      # Performs the deletion.
      #
      # @return [ServiceResponse] on success the payload carries the destroyed
      #   rule under :container_registry_protection_rule; on authorization
      #   failure or any error during destroy!, an error response with a
      #   message and a nil rule payload.
      def execute
        unless can?(current_user, :admin_container_image, container_registry_protection_rule.project)
          return service_response_error(message: _('Unauthorized to delete a container registry protection rule'))
        end

        # destroy! returns the (now frozen) record, which becomes the payload.
        ServiceResponse.success(
          payload: { container_registry_protection_rule: container_registry_protection_rule.destroy! }
        )
      rescue StandardError => e
        service_response_error(message: e.message)
      end

      private

      attr_reader :container_registry_protection_rule, :current_user

      # Wraps +message+ in an error ServiceResponse with an empty rule payload.
      def service_response_error(message:)
        ServiceResponse.error(
          message: message,
          payload: { container_registry_protection_rule: nil }
        )
      end
    end
  end
end

View File

@ -39,7 +39,7 @@
= render Pajamas::ToggleComponent.new(classes: 'js-force-push-toggle',
label: s_("ProtectedBranch|Allowed to force push"),
label_position: :hidden) do
- force_push_docs_url = help_page_url('topics/git/git_rebase', anchor: 'force-push')
- force_push_docs_url = help_page_url('topics/git/git_rebase', anchor: 'force-pushing')
- force_push_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: force_push_docs_url }
= (s_("ProtectedBranch|Allow all users with push access to %{tag_start}force push%{tag_end}.") % { tag_start: force_push_link_start, tag_end: '</a>' }).html_safe
= render_if_exists 'protected_branches/ee/code_owner_approval_form', f: f, protected_branch_entity: protected_branch_entity

View File

@ -315,7 +315,6 @@ module Gitlab
config.assets.precompile << "page_bundles/jira_connect.css"
config.assets.precompile << "page_bundles/learn_gitlab.css"
config.assets.precompile << "page_bundles/login.css"
config.assets.precompile << "page_bundles/marketing_popover.css"
config.assets.precompile << "page_bundles/members.css"
config.assets.precompile << "page_bundles/merge_conflicts.css"
config.assets.precompile << "page_bundles/merge_request_analytics.css"

View File

@ -0,0 +1,8 @@
---
name: bitbucket_server_importer_exponential_backoff
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/137974
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/432974
milestone: '16.7'
type: development
group: group::import and integrate
default_enabled: false

View File

@ -27,6 +27,8 @@ Redis::Cluster::SlotLoader.prepend(Gitlab::Patch::SlotLoader)
Redis::Cluster::CommandLoader.prepend(Gitlab::Patch::CommandLoader)
Redis::Cluster.prepend(Gitlab::Patch::RedisCluster)
ConnectionPool.prepend(Gitlab::Instrumentation::ConnectionPool)
if Gitlab::Redis::Workhorse.params[:cluster].present?
raise "Do not configure workhorse with a Redis Cluster as pub/sub commands are not cluster-compatible."
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true

# Adds a partial index on bulk_imports(updated_at, id), restricted to rows
# whose status is 0 or 1 (presumably the not-yet-finished states — confirm
# against the BulkImport state machine), named for use by stale-import lookups.
class AddIndexToBulkImportsOnUpdatedAtAndStatus < Gitlab::Database::Migration[2.2]
  milestone '16.7'

  # Required because add_concurrent_index cannot run inside a transaction.
  disable_ddl_transaction!

  INDEX_NAME = 'index_bulk_imports_on_updated_at_and_id_for_stale_status'

  def up
    # NOTE: unquoted STATUS is folded to lowercase by PostgreSQL, so this
    # matches the `status` column despite the casing.
    add_concurrent_index :bulk_imports, [:updated_at, :id],
      where: 'STATUS in (0, 1)', name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :bulk_imports, name: INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
15853bc68a9e5bbf2e45ed646f3630fcfbeed9a8a21b1edbd02f92946b410b88

View File

@ -31920,6 +31920,8 @@ CREATE INDEX index_bulk_import_failures_on_bulk_import_entity_id ON bulk_import_
CREATE INDEX index_bulk_import_failures_on_correlation_id_value ON bulk_import_failures USING btree (correlation_id_value);
CREATE INDEX index_bulk_imports_on_updated_at_and_id_for_stale_status ON bulk_imports USING btree (updated_at, id) WHERE (status = ANY (ARRAY[0, 1]));
CREATE INDEX index_bulk_imports_on_user_id ON bulk_imports USING btree (user_id);
CREATE INDEX index_catalog_resource_components_on_catalog_resource_id ON catalog_resource_components USING btree (catalog_resource_id);

View File

@ -30,6 +30,7 @@ swap:
ex: "for example"
filename: "file name"
filesystem: "file system"
fullscreen: "full screen"
info: "information"
installation from source: self-compiled installation
installations from source: self-compiled installations

View File

@ -177,6 +177,8 @@ The following metrics are available:
| `gitlab_ci_queue_iteration_duration_seconds` | Histogram | 16.3 | Time it takes to find a build in CI/CD queue |
| `gitlab_ci_queue_retrieval_duration_seconds` | Histogram | 16.3 | Time it takes to execute a SQL query to retrieve builds queue |
| `gitlab_ci_queue_active_runners_total` | Histogram | 16.3 | The amount of active runners that can process queue in a project |
| `gitlab_connection_pool_size` | Gauge | 16.7 | Size of connection pool |
| `gitlab_connection_pool_available_count` | Gauge | 16.7 | Number of available connections in the pool |
## Metrics controlled by a feature flag

View File

@ -18,8 +18,8 @@ on the GitLab server. You can use them to run Git-related tasks such as:
Git server hooks use `pre-receive`, `post-receive`, and `update`
[Git server-side hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks#_server_side_hooks).
GitLab administrators configure server hooks on the file system of the GitLab server. If you don't have file system access,
alternatives to server hooks include:
GitLab administrators configure server hooks through the Gitaly CLI, which connects to the Gitaly gRPC API.
If you don't have access to the Gitaly CLI, alternatives to server hooks include:
- [Webhooks](../user/project/integrations/webhooks.md).
- [GitLab CI/CD](../ci/index.md).

View File

@ -3241,6 +3241,31 @@ Input type: `DeleteAnnotationInput`
| <a id="mutationdeleteannotationclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationdeleteannotationerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
### `Mutation.deleteContainerRegistryProtectionRule`
Deletes a container registry protection rule. Available only when feature flag `container_registry_protected_containers` is enabled.
WARNING:
**Introduced** in 16.7.
This feature is an Experiment. It can be changed or removed at any time.
Input type: `DeleteContainerRegistryProtectionRuleInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationdeletecontainerregistryprotectionruleclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationdeletecontainerregistryprotectionruleid"></a>`id` | [`ContainerRegistryProtectionRuleID!`](#containerregistryprotectionruleid) | Global ID of the container registry protection rule to delete. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationdeletecontainerregistryprotectionruleclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationdeletecontainerregistryprotectionrulecontainerregistryprotectionrule"></a>`containerRegistryProtectionRule` | [`ContainerRegistryProtectionRule`](#containerregistryprotectionrule) | Container registry protection rule that was deleted successfully. |
| <a id="mutationdeletecontainerregistryprotectionruleerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
### `Mutation.deletePackagesProtectionRule`
Deletes a protection rule for packages. Available only when feature flag `packages_protected_packages` is enabled.

View File

@ -215,11 +215,7 @@ The approval status details are shown:
## View blocked deployments
Use the UI or API to review the status of your deployments, including whether a deployment is blocked.
::Tabs
:::TabTitle With the UI
Review the status of your deployments, including whether a deployment is blocked.
To view your deployments:
@ -229,16 +225,9 @@ To view your deployments:
A deployment with the **blocked** label is blocked.
:::TabTitle With the API
To view your deployments:
- Using the [deployments API](../../api/deployments.md#get-a-specific-deployment), get a specific deployment, or a list of all deployments in a project.
To view your deployments, you can also [use the API](../../api/deployments.md#get-a-specific-deployment).
The `status` field indicates whether a deployment is blocked.
::EndTabs
## Related topics
- [Deployment approvals feature epic](https://gitlab.com/groups/gitlab-org/-/epics/6832)

View File

@ -72,6 +72,7 @@ RAILS_ENV=development bundle exec rake gitlab:duo:setup['<test-group-name>']
1. **Group Settings** > **General** -> **Permissions and group features**
1. Enable **Experiment & Beta features**
1. Enable the specific feature flag for the feature you want to test
1. Optional. Use the `rake gitlab:duo:enable_feature_flags` Rake task to enable all feature flags assigned to the AI Framework group
1. Set the required access token. To receive an access token:
1. For Vertex, follow the [instructions below](#configure-gcp-vertex-access).
1. For all other providers, like Anthropic, create an access request where `@m_gill`, `@wayne`, and `@timzallmann` are the tech stack owners.

View File

@ -700,6 +700,11 @@ The **upstream project** (also known as the **source project**) and the **fork**
If the **fork relationship** is removed, the
**fork** is **unlinked** from the **upstream project**.
## full screen
Use two words for **full screen**.
([Vale](../testing.md#vale) rule: [`SubstitutionWarning.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/SubstitutionWarning.yml))
## future tense
When possible, use present tense instead of future tense. For example, use **after you execute this command, GitLab displays the result** instead of **after you execute this command, GitLab will display the result**. ([Vale](../testing.md#vale) rule: [`FutureTense.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/FutureTense.yml))

View File

@ -369,7 +369,7 @@ git push origin main
```
Sometimes Git does not allow you to push to a repository. Instead,
you must [force an update](../topics/git/git_rebase.md#force-push).
you must [force an update](../topics/git/git_rebase.md#force-pushing).
### Delete all changes in the branch

View File

@ -7,95 +7,107 @@ description: "Introduction to Git rebase and force push, methods to resolve merg
# Git rebase and force push **(FREE ALL)**
This guide helps you to get started with rebases, force pushes, and fixing
[merge conflicts](../../user/project/merge_requests/conflicts.md) locally.
Before you attempt a force push or a rebase, make sure you are familiar with
[Git through the command line](../../gitlab-basics/start-using-git.md).
In Git, a rebase updates your branch with the contents of another branch.
A rebase confirms that changes in your branch don't conflict with
changes in the target branch.
WARNING:
`git rebase` rewrites the commit history. It **can be harmful** to do it in
shared branches. It can cause complex and hard to resolve
[merge conflicts](../../user/project/merge_requests/conflicts.md). In
these cases, instead of rebasing your branch against the default branch,
consider pulling it instead (`git pull origin master`). Pulling has similar
effects with less risk compromising the work of your contributors.
If you have a [merge conflict](../../user/project/merge_requests/conflicts.md),
you can rebase to fix it.
In Git, a rebase updates your feature branch with the contents of another branch.
This step is important for Git-based development strategies. Use a rebase to confirm
that your branch's changes don't conflict with any changes added to your target branch
_after_ you created your feature branch.
## What happens during rebase
When you rebase:
1. Git imports all the commits submitted to your target branch _after_ you initially created
your feature branch from it.
1. Git stacks the commits you have in your feature branch on top of all
1. Git imports all the commits submitted to your target branch after you initially created
your branch from it.
1. Git stacks the commits you have in your branch on top of all
the commits it imported from that branch:
![Git rebase illustration](img/git_rebase_v13_5.png)
![Git rebase illustration](img/git_rebase_v13_5.png)
While most rebases are performed against `main`, you can rebase against any other
branch, such as `release-15-3`. You can also specify a different remote repository
(such as `upstream`) instead of `origin`.
## Back up a branch before rebase
WARNING:
`git rebase` rewrites the commit history. It **can be harmful** to do it in
shared branches. It can cause complex and hard to resolve
merge conflicts. Instead of rebasing your branch against the default branch,
consider pulling it instead (`git pull origin master`). Pulling has similar
effects with less risk of compromising others' work.
To back up a branch before taking any destructive action, like a rebase or force push:
## Rebase by using Git
1. Open your feature branch in the terminal: `git checkout my-feature`
1. Create a backup branch: `git branch my-feature-backup`
Any changes added to `my-feature` after this point are lost
if you restore from the backup branch.
When you use Git to rebase, each commit is applied to your branch.
When merge conflicts occur, you are prompted to address them.
Your branch is backed up, and you can try a rebase or a force push.
If anything goes wrong, restore your branch from its backup:
1. Make sure you're in the correct branch (`my-feature`): `git checkout my-feature`
1. Reset it against `my-feature-backup`: `git reset --hard my-feature-backup`
## Rebase a branch
[Rebases](https://git-scm.com/docs/git-rebase) are very common operations in
Git, and have these options:
- **Regular rebases.** This type of rebase can be done through the
[command line](#regular-rebase) and [the GitLab UI](#from-the-gitlab-ui).
- [**Interactive rebases**](#interactive-rebase) give more flexibility by
enabling you to specify how to handle each commit. Interactive rebases
must be done on the command line.
Any user who rebases a branch is treated as having added commits to that branch.
If a project is configured to
[**prevent approvals by users who add commits**](../../user/project/merge_requests/approvals/settings.md#prevent-approvals-by-users-who-add-commits),
a user who rebases a branch cannot also approve its merge request.
### Regular rebase
Standard rebases replay the previous commits on a branch without changes, stopping
only if merge conflicts occur.
If you want more advanced options for your commits,
do [an interactive rebase](#rebase-interactively-by-using-git).
Prerequisites:
- You must have permission to force push branches.
- You must have permission to force push to branches.
To update your branch `my-feature` with recent changes from your
[default branch](../../user/project/repository/branches/default.md) (here, using `main`):
To use Git to rebase your branch against the target branch:
1. Fetch the latest changes from `main`: `git fetch origin main`
1. Check out your feature branch: `git checkout my-feature`
1. Rebase it against `main`: `git rebase origin/main`
1. [Force push](#force-push) to your branch.
1. Open a terminal and change to your project.
1. Ensure you have the latest contents of the target branch.
In this example, the target branch is `main`:
If there are merge conflicts, Git prompts you to fix them before continuing the rebase.
```shell
git fetch origin main
```
### From the GitLab UI
1. Check out your branch:
The `/rebase` [quick action](../../user/project/quick_actions.md#issues-merge-requests-and-epics)
rebases your feature branch directly from its merge request if all of these
conditions are met:
```shell
git checkout my-branch
```
- No merge conflicts exist for your feature branch.
- You have the **Developer** role for the source project. This role grants you
1. Optional. Create a backup of your branch:
```shell
git branch my-branch-backup
```
Changes added to `my-branch` after this point are lost
if you restore from the backup branch.
1. Rebase against the main branch:
```shell
git rebase origin/main
```
1. If merge conflicts exist:
1. Fix the conflicts in your editor.
1. Add the files:
```shell
git add .
```
1. Continue the rebase:
```shell
git rebase --continue
```
1. Force push your changes to the target branch, while protecting others' commits:
```shell
git push origin my-branch --force-with-lease
```
## Rebase from the UI
You can rebase a merge request from the GitLab UI.
Prerequisites:
- No merge conflicts must exist.
- You must have at least the **Developer** role for the source project. This role grants you
permission to push to the source branch for the source project.
- If the merge request is in a fork, the fork must allow commits
[from members of the upstream project](../../user/project/merge_requests/allow_collaboration.md).
@ -106,91 +118,112 @@ To rebase from the UI:
1. Type `/rebase` in a comment.
1. Select **Comment**.
GitLab schedules a rebase of the feature branch against the default branch and
GitLab schedules a rebase of the branch against the default branch and
executes it as soon as possible.
### Interactive rebase
## Rebase interactively by using Git
Use an interactive rebase (the `--interactive` flag, or `-i`) to simultaneously
update a branch while you modify how its commits are handled.
For example, to edit the last five commits in your branch (`HEAD~5`), run:
Use an interactive rebase when you want to specify how to handle each commit.
You must do an interactive rebase from the command line.
```shell
git rebase -i HEAD~5
```
Prerequisites:
Git opens the last five commits in your terminal text editor, oldest commit first.
Each commit shows the action to take on it, the SHA, and the commit title:
- [Vim](https://www.vim.org/) must be your text editor to follow these instructions.
```shell
pick 111111111111 Second round of structural revisions
pick 222222222222 Update inbound link to this changed page
pick 333333333333 Shifts from H4 to H3
pick 444444444444 Adds revisions from editorial
pick 555555555555 Revisions continue to build the concept part out
To rebase interactively:
# Rebase 111111111111..222222222222 onto zzzzzzzzzzzz (5 commands)
#
# Commands:
# p, pick <commit> = use commit
# r, reword <commit> = use commit, but edit the commit message
# e, edit <commit> = use commit, but stop for amending
# s, squash <commit> = use commit, but meld into previous commit
# f, fixup [-C | -c] <commit> = like "squash" but keep only the previous
```
1. Open a terminal and change to your project.
1. Ensure you have the latest contents of the target branch.
In this example, the target branch is `main`:
After the list of commits, a commented-out section shows some common actions you
can take on a commit:
```shell
git fetch origin main
```
- **Pick** a commit to use it with no changes. The default option.
- **Reword** a commit message.
- **Edit** a commit to use it, but pause the rebase to amend (add changes to) it.
- **Squash** multiple commits together to simplify the commit history
of your feature branch.
1. Check out your branch:
Replace the keyword `pick` according to
the operation you want to perform in each commit. To do so, edit
the commits in your terminal's text editor.
```shell
git checkout my-branch
```
For example, with [Vim](https://www.vim.org/) as the text editor in
a macOS Zsh shell, you can `squash` or `fixup` (combine) all of the commits together:
1. Optional. Create a backup of your branch:
NOTE:
The steps for editing through the command line can be slightly
different depending on your operating system and the shell you use.
```shell
git branch my-branch-backup
```
Changes added to `my-branch` after this point are lost
if you restore from the backup branch.
1. In the GitLab UI, in your merge request, confirm how many commits
you want to rebase by viewing the **Commits** tab.
1. Open these commits. For example, to edit the last five commits in your branch (`HEAD~5`), type:
```shell
git rebase -i HEAD~5
```
Git opens the last five commits in your terminal text editor, oldest commit first.
Each commit shows the action to take on it, the SHA, and the commit title:
```shell
pick 111111111111 Second round of structural revisions
pick 222222222222 Update inbound link to this changed page
pick 333333333333 Shifts from H4 to H3
pick 444444444444 Adds revisions from editorial
pick 555555555555 Revisions continue to build the concept part out
# Rebase 111111111111..222222222222 onto zzzzzzzzzzzz (5 commands)
#
# Commands:
# p, pick <commit> = use commit
# r, reword <commit> = use commit, but edit the commit message
# e, edit <commit> = use commit, but stop for amending
# s, squash <commit> = use commit, but meld into previous commit
# f, fixup [-C | -c] <commit> = like "squash" but keep only the previous
```
1. Switch to Vim's edit mode by pressing <kbd>i</kbd>.
1. Move to the second commit in the list by using your keyboard arrows.
1. Change the word `pick` to `squash` or `fixup` (or `s` or `f`).
1. Do the same for the remaining commits. Leave the first commit as `pick`.
1. End edit mode, save, and quit:
- Press <kbd>ESC</kbd>.
- Type `:wq`.
1. Press <kbd>i</kbd> on your keyboard to switch to Vim's editing mode.
1. Use your keyboard arrows to edit the **second** commit keyword
from `pick` to `squash` or `fixup` (or `s` or `f`). Do the same to the remaining commits.
Leave the first commit **unchanged** (`pick`) as we want to squash
all other commits into it.
1. Press <kbd>Escape</kbd> to leave the editing mode.
1. Type `:wq` to "write" (save) and "quit".
1. When squashing, Git outputs the commit message so you have a chance to edit it:
- All lines starting with `#` are ignored and not included in the commit
message. Everything else is included.
- To leave it as it is, type `:wq`. To edit the commit message: switch to the
editing mode, edit the commit message, and save it as you just did.
1. If you haven't pushed your commits to the remote branch before rebasing,
push your changes without a force push. If you had pushed these commits already,
[force push](#force-push) instead.
message. Everything else is included.
- To leave it as-is, type `:wq`. To edit the commit message, switch to
edit mode, edit the commit message, and save.
#### Configure squash options for a project
1. Commit to the target branch.
Keeping the default branch commit history clean doesn't require you to
manually squash all your commits on each merge request. GitLab provides
[squash and merge](../../user/project/merge_requests/squash_and_merge.md#configure-squash-options-for-a-project),
options at a project level.
- If you didn't push your commits to the target branch before rebasing,
push your changes without a force push:
## Force push
```shell
git push origin my-branch
```
- If you pushed these commits already, use a force push:
```shell
git push origin my-branch --force-with-lease
```
## Force pushing
Complex operations in Git require you to force an update to the remote branch.
Operations like squashing commits, resetting a branch, or rebasing a branch rewrite
the history of your branch. Git requires a forced update to help safeguard against
these more destructive changes from happening accidentally.
Force pushing is not recommended on shared branches, as you risk destroying the
changes of others.
Force pushing is not recommended on shared branches, because you risk destroying
others' changes.
If the branch you want to force push is [protected](../../user/project/protected_branches.md),
you can't force push to it unless you either:
@ -201,27 +234,32 @@ you can't force push to it unless you either:
Then you can force push and protect it again.
### `--force-with-lease` flag
## Restore your backed up branch
The [`--force-with-lease`](https://git-scm.com/docs/git-push#Documentation/git-push.txt---force-with-leaseltrefnamegt)
flag force pushes. Because it preserves any new commits added to the remote
branch by other people, it is safer than `--force`:
Your branch is backed up, and you can try a rebase or a force push.
If anything goes wrong, restore your branch from its backup:
```shell
git push --force-with-lease origin my-feature
```
1. Make sure you're in the correct branch:
### `--force` flag
```shell
git checkout my-branch
```
The `--force` flag forces pushes, but does not preserve any new commits added to
the remote branch by other people. To use this method, pass the flag `--force` or `-f`
to the `push` command:
1. Reset your branch against the backup:
```shell
git push --force origin my-feature
```
```shell
git reset --hard my-branch-backup
```
## Approving after rebase
If you rebase a branch, you've added commits.
If your project is configured to
[prevent approvals by users who add commits](../../user/project/merge_requests/approvals/settings.md#prevent-approvals-by-users-who-add-commits),
you can't approve a merge request if you have rebased it.
## Related topics
- [Numerous undo possibilities in Git](numerous_undo_possibilities_in_git/index.md#undo-staged-local-changes-without-modifying-history)
- [Git documentation for branches and rebases](https://git-scm.com/book/en/v2/Git-Branching-Rebasing)
- [Project squash and merge settings](../../user/project/merge_requests/squash_and_merge.md#configure-squash-options-for-a-project)

View File

@ -51,3 +51,73 @@ For more information, see [working-tree-encoding](https://git-scm.com/docs/gitat
The `.gitattributes` file can be used to define which language to use when
syntax highlighting files and diffs. For more information, see
[Syntax highlighting](highlighting.md).
## Custom merge drivers
> Ability to configure custom merge drivers through GitLab introduced in GitLab 15.10.
You can define [custom merge drivers](https://git-scm.com/docs/gitattributes#_defining_a_custom_merge_driver)
in a GitLab configuration file, then use the custom merge drivers in a Git
`.gitattributes` file.
You might configure a custom merge driver, for example, if there are certain
files that should be ignored during a merge, such as build files and configuration files.
### Configure a custom merge driver
The following example illustrates how to define and use a custom merge driver in
GitLab.
How to configure a custom merge driver depends on the type of installation.
::Tabs
:::TabTitle Linux package (Omnibus)
1. Edit `/etc/gitlab/gitlab.rb`.
1. Add configuration similar to the following:
```ruby
gitaly['configuration'] = {
# ...
git: {
# ...
config: [
# ...
{ key: "merge.foo.driver", value: "true" },
],
},
}
```
:::TabTitle Self-compiled (source)
1. Edit `gitaly.toml`.
1. Add configuration similar to the following:
```toml
[[git.config]]
key = "merge.foo.driver"
value = "true"
```
::EndTabs
In this example, during a merge, Git uses the `driver` value as the command to execute. In
this case, because we are using [`true`](https://man7.org/linux/man-pages/man1/true.1.html)
with no arguments, the command always exits successfully with a return code of zero without
modifying the file. This means that for the files specified in `.gitattributes`, Git treats
the merge as resolved and merges do nothing.
To use your own merge driver, replace the value of `driver` with a path to an
executable. For more details on how this command is invoked, see the Git
documentation on [custom merge drivers](https://git-scm.com/docs/gitattributes#_defining_a_custom_merge_driver).
### Use `.gitattributes` to set files custom merge driver applies to
In a `.gitattributes` file, you can set the paths of files you want to use with the custom merge driver. For example:
```plaintext
config/* merge=foo
```
In this case, every file under the `config/` folder uses the custom merge driver called `foo` defined in the GitLab configuration.

View File

@ -105,7 +105,7 @@ most control over each change:
git switch my-feature-branch
```
1. [Rebase your branch](../../../topics/git/git_rebase.md#regular-rebase) against the
1. [Rebase your branch](../../../topics/git/git_rebase.md#rebase-by-using-git) against the
target branch (here, `main`) so Git prompts you with the conflicts:
```shell
@ -150,7 +150,7 @@ most control over each change:
running `git rebase`.
After you run `git rebase --continue`, you cannot abort the rebase.
1. [Force-push](../../../topics/git/git_rebase.md#force-push) the changes to your
1. [Force-push](../../../topics/git/git_rebase.md#force-pushing) the changes to your
remote branch.
## Merge commit strategy

View File

@ -198,7 +198,7 @@ In these merge methods, you can merge only when your source branch is up-to-date
If a fast-forward merge is not possible but a conflict-free rebase is possible,
GitLab provides:
- The [`/rebase` quick action](../../../../topics/git/git_rebase.md#from-the-gitlab-ui).
- The [`/rebase` quick action](../../../../topics/git/git_rebase.md#rebase-from-the-ui).
- The option to select **Rebase** in the user interface.
You must rebase the source branch locally before a fast-forward merge if both

View File

@ -267,7 +267,7 @@ Deploy keys are not available in the **Allowed to merge** dropdown list.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15611) in GitLab 13.10 [with a flag](../../administration/feature_flags.md) named `allow_force_push_to_protected_branches`. Disabled by default.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/323431) in GitLab 14.0. Feature flag `allow_force_push_to_protected_branches` removed.
You can allow [force pushes](../../topics/git/git_rebase.md#force-push) to
You can allow [force pushes](../../topics/git/git_rebase.md#force-pushing) to
protected branches.
To protect a new branch and enable force push:

View File

@ -86,7 +86,7 @@ reduce the repository size for another import attempt:
1. To reduce the repository size, work on this `smaller-tmp-main` branch:
[identify and remove large files](../repository/reducing_the_repo_size_using_git.md)
or [interactively rebase and fixup](../../../topics/git/git_rebase.md#interactive-rebase)
or [interactively rebase and fixup](../../../topics/git/git_rebase.md#rebase-interactively-by-using-git)
to reduce the number of commits.
```shell

View File

@ -2,6 +2,8 @@ PATH
remote: .
specs:
gitlab-secret_detection (0.1.0)
re2 (~> 2.4)
toml-rb (~> 2.2)
GEM
remote: https://rubygems.org/
@ -24,6 +26,7 @@ GEM
bigdecimal (3.1.4)
binding_of_caller (1.0.0)
debug_inspector (>= 0.0.1)
citrus (3.0.2)
coderay (1.1.3)
concurrent-ruby (1.2.2)
connection_pool (2.4.1)
@ -31,8 +34,8 @@ GEM
diff-lcs (1.5.0)
drb (2.2.0)
ruby2_keywords
gitlab-styles (10.1.0)
rubocop (~> 1.50.2)
gitlab-styles (11.0.0)
rubocop (~> 1.57.1)
rubocop-graphql (~> 0.18)
rubocop-performance (~> 1.15)
rubocop-rails (~> 2.17)
@ -40,6 +43,8 @@ GEM
i18n (1.14.1)
concurrent-ruby (~> 1.0)
json (2.6.3)
language_server-protocol (3.17.0.3)
mini_portile2 (2.8.5)
minitest (5.20.0)
mutex_m (0.2.0)
parallel (1.23.0)
@ -50,9 +55,11 @@ GEM
coderay
parser
unparser
racc (1.7.1)
racc (1.7.3)
rack (3.0.8)
rainbow (3.1.1)
re2 (2.4.3)
mini_portile2 (~> 2.8.5)
regexp_parser (2.8.2)
rexml (3.2.6)
rspec (3.12.0)
@ -84,14 +91,15 @@ GEM
binding_of_caller
rspec-parameterized-core (< 2)
rspec-support (3.12.1)
rubocop (1.50.2)
rubocop (1.57.2)
json (~> 2.3)
language_server-protocol (>= 3.17.0)
parallel (~> 1.10)
parser (>= 3.2.0.0)
parser (>= 3.2.2.4)
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 1.8, < 3.0)
rexml (>= 3.2.5, < 4.0)
rubocop-ast (>= 1.28.0, < 2.0)
rubocop-ast (>= 1.28.1, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
rubocop-ast (1.30.0)
@ -115,10 +123,12 @@ GEM
rubocop-factory_bot (~> 2.22)
ruby-progressbar (1.13.0)
ruby2_keywords (0.0.5)
toml-rb (2.2.0)
citrus (~> 3.0, > 3.0)
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
unicode-display_width (2.5.0)
unparser (0.6.9)
unparser (0.6.10)
diff-lcs (~> 1.3)
parser (>= 3.2.2.4)
@ -127,13 +137,13 @@ PLATFORMS
DEPENDENCIES
gitlab-secret_detection!
gitlab-styles (~> 10.1.0)
gitlab-styles (~> 11.0)
rspec (~> 3.0)
rspec-benchmark (~> 0.6.0)
rspec-parameterized (~> 1.0)
rubocop (~> 1.50)
rubocop (~> 1.57)
rubocop-rails (<= 2.20)
rubocop-rspec (~> 2.22)
BUNDLED WITH
2.4.14
2.4.22

View File

@ -24,11 +24,14 @@ Gem::Specification.new do |spec|
spec.files = Dir['lib/**/*.rb']
spec.require_paths = ["lib"]
spec.add_development_dependency "gitlab-styles", "~> 10.1.0"
spec.add_runtime_dependency "re2", "~> 2.4"
spec.add_runtime_dependency "toml-rb", "~> 2.2"
spec.add_development_dependency "gitlab-styles", "~> 11.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "rspec-benchmark", "~> 0.6.0"
spec.add_development_dependency "rspec-parameterized", "~> 1.0"
spec.add_development_dependency "rubocop", "~> 1.50"
spec.add_development_dependency "rubocop", "~> 1.57"
spec.add_development_dependency "rubocop-rails", "<= 2.20" # https://github.com/rubocop/rubocop-rails/issues/1173
spec.add_development_dependency "rubocop-rspec", "~> 2.22"
end

View File

@ -1,6 +1,10 @@
# frozen_string_literal: true
require_relative "secret_detection/version"
require_relative 'secret_detection/version'
require_relative 'secret_detection/status'
require_relative 'secret_detection/finding'
require_relative 'secret_detection/response'
require_relative 'secret_detection/scan'
module Gitlab
module SecretDetection

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
module Gitlab
  module SecretDetection
    # Value object describing a single secret detected within a blob.
    #
    # +blob_id+::     identifier of the scanned blob
    # +status+::      one of the SecretDetection::Status values
    # +line_number+:: 1-based line on which the secret was found (optional)
    # +type+::        id of the rule that matched (optional)
    # +description+:: human-readable description of the rule (optional)
    class Finding
      attr_reader :blob_id, :status, :line_number, :type, :description

      def initialize(blob_id, status, line_number = nil, type = nil, description = nil)
        @blob_id, @status, @line_number, @type, @description =
          blob_id, status, line_number, type, description
      end

      # Two findings are equal when they are instances of the same class
      # and every attribute matches.
      def ==(other)
        other.class == self.class && state == other.state
      end

      protected

      # Ordered attribute tuple backing the equality comparison.
      def state
        [blob_id, status, line_number, type, description]
      end
    end
  end
end

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
module Gitlab
  module SecretDetection
    # Result object returned by the scan operation.
    #
    # +status+::  one of the SecretDetection::Status values indicating the
    #             scan operation's outcome
    # +results+:: array of SecretDetection::Finding values; nil by default
    class Response
      attr_reader :status, :results

      def initialize(status, results = nil)
        @status, @results = status, results
      end

      # Responses are equal when class, status and results all match.
      def ==(other)
        other.class == self.class && state == other.state
      end

      protected

      # Attribute tuple backing the equality comparison.
      def state
        [status, results]
      end
    end
  end
end

View File

@ -0,0 +1,193 @@
# frozen_string_literal: true
require 'toml-rb'
require 're2'
require 'logger'
require 'timeout'
module Gitlab
  module SecretDetection
    # Scan is responsible for running the Secret Detection scan operation:
    # it compiles the gitleaks ruleset into an RE2::Set and matches each
    # blob's content against it, line by line.
    class Scan
      # RulesetParseError is thrown when the code fails to parse the
      # ruleset file from the given path
      RulesetParseError = Class.new(StandardError)

      # RulesetCompilationError is thrown when the code fails to compile
      # the predefined rulesets
      RulesetCompilationError = Class.new(StandardError)

      # default time limit (in seconds) for running the scan operation per invocation
      DEFAULT_SCAN_TIMEOUT_SECS = 60

      # default time limit (in seconds) for running the scan operation on a single blob
      DEFAULT_BLOB_TIMEOUT_SECS = 5

      # file path where the secrets ruleset file is located
      RULESET_FILE_PATH = File.expand_path('../../gitleaks.toml', __dir__)

      # ignore the scanning of a line which ends with the following keyword
      GITLEAKS_KEYWORD_IGNORE = 'gitleaks:allow'

      # Initializes the instance with logger along with following operations:
      # 1. Parse ruleset for the given +ruleset_path+ (default: +RULESET_FILE_PATH+). Raises +RulesetParseError+
      #    in case the operation fails.
      # 2. Extract keywords from the parsed ruleset to use it for matching keywords before regex operation.
      # 3. Build and compile rule regex patterns obtained from the ruleset. Raises +RulesetCompilationError+
      #    in case the compilation fails.
      def initialize(logger: Logger.new($stdout), ruleset_path: RULESET_FILE_PATH)
        @logger = logger
        @rules = parse_ruleset ruleset_path
        @keywords = create_keywords @rules
        @matcher = build_pattern_matcher @rules
      end

      # Runs Secret Detection scan on the list of given blobs. Both the total scan duration and
      # the duration for each blob is time bound via +timeout+ and +blob_timeout+ respectively.
      #
      # +blobs+:: Array of blobs with each blob to have `id` and `data` properties.
      # +timeout+:: No of seconds (accepts floating point for smaller time values) to limit the total scan duration
      # +blob_timeout+:: No of seconds (accepts floating point for smaller time values) to limit
      #                  the scan duration on each blob
      #
      # Returns an instance of SecretDetection::Response by following below structure:
      # {
      #     status: One of the SecretDetection::Status values
      #     results: [SecretDetection::Finding]
      # }
      def secrets_scan(blobs, timeout: DEFAULT_SCAN_TIMEOUT_SECS, blob_timeout: DEFAULT_BLOB_TIMEOUT_SECS)
        return SecretDetection::Response.new(SecretDetection::Status::INPUT_ERROR) unless validate_scan_input(blobs)

        Timeout.timeout timeout do
          matched_blobs = filter_by_keywords(blobs)

          next SecretDetection::Response.new(SecretDetection::Status::NOT_FOUND) if matched_blobs.empty?

          secrets = find_secrets_bulk(matched_blobs, blob_timeout)

          scan_status = overall_scan_status secrets

          SecretDetection::Response.new(scan_status, secrets)
        end
      rescue Timeout::Error => e
        @logger.error "Secret Detection operation timed out: #{e}"

        SecretDetection::Response.new(SecretDetection::Status::SCAN_TIMEOUT)
      end

      private

      attr_reader :logger, :rules, :keywords, :matcher

      # parses given ruleset file and returns the parsed rules
      def parse_ruleset(ruleset_file_path)
        rules_data = TomlRB.load_file(ruleset_file_path)
        rules_data['rules']
      rescue StandardError => e
        logger.error "Failed to parse Secret Detection ruleset from '#{ruleset_file_path}' path: #{e}"

        raise RulesetParseError
      end

      # builds RE2::Set pattern matcher for the given rules
      def build_pattern_matcher(rules)
        matcher = RE2::Set.new

        rules.each do |rule|
          matcher.add(rule['regex'])
        end

        unless matcher.compile
          logger.error "Failed to compile Secret Detection rulesets in RE::Set"

          raise RulesetCompilationError
        end

        matcher
      end

      # creates and returns the unique set of rule matching keywords
      def create_keywords(rules)
        secrets_keywords = []

        rules.each do |rule|
          secrets_keywords << rule['keywords']
        end

        secrets_keywords.flatten.compact.to_set
      end

      # returns only those blobs that contain at least one of the keywords
      # from the keywords list
      def filter_by_keywords(blobs)
        matched_blobs = []

        blobs.each do |blob|
          matched_blobs << blob if keywords.any? { |keyword| blob.data.include?(keyword) }
        end

        matched_blobs.freeze
      end

      # finds secrets in the given list of blobs; a blob exceeding
      # +blob_timeout+ yields a BLOB_TIMEOUT finding instead of failing the scan
      def find_secrets_bulk(blobs, blob_timeout)
        found_secrets = []

        blobs.each do |blob|
          found_secrets << Timeout.timeout(blob_timeout) do
            find_secrets(blob)
          end
        rescue Timeout::Error => e
          logger.error "Secret Detection scan timed out on the blob(id:#{blob.id}): #{e}"

          found_secrets << SecretDetection::Finding.new(blob.id,
            SecretDetection::Status::BLOB_TIMEOUT)
        end

        found_secrets.flatten.freeze
      end

      # finds secrets in the given blob, line by line
      def find_secrets(blob)
        secrets = []

        blob.data.each_line.with_index do |line, index|
          # ignore the line scan if it is suffixed with 'gitleaks:allow'
          next if line.end_with?(GITLEAKS_KEYWORD_IGNORE)

          patterns = matcher.match(line, :exception => false)

          next unless patterns.any?

          line_no = index + 1

          patterns.each do |pattern|
            type = rules[pattern]['id']
            description = rules[pattern]['description']

            secrets << SecretDetection::Finding.new(blob.id, SecretDetection::Status::FOUND, line_no, type,
              description)
          end
        end

        secrets
      rescue StandardError => e
        logger.error "Secret Detection scan failed on the blob(id:#{blob.id}): #{e}"

        SecretDetection::Finding.new(blob.id, SecretDetection::Status::SCAN_ERROR)
      end

      # Validates the scan input: +blobs+ must be an Array whose elements all
      # respond to both +id+ and +data+.
      def validate_scan_input(blobs)
        return false unless blobs.instance_of?(Array)

        blobs.all? do |blob|
          # Require BOTH accessors. The previous `||` check accepted a blob
          # responding to only one of them, which then crashed later at
          # `blob.data` with NoMethodError instead of returning INPUT_ERROR.
          next false unless blob.respond_to?(:id) && blob.respond_to?(:data)

          blob.data.freeze # freeze blobs to avoid additional object allocations on strings
        end
      end

      # Maps the collected findings onto an overall scan status:
      # no timeouts -> FOUND, all timed out -> SCAN_TIMEOUT,
      # some timed out -> FOUND_WITH_ERRORS.
      def overall_scan_status(found_secrets)
        return SecretDetection::Status::NOT_FOUND if found_secrets.empty?

        timed_out_blobs = found_secrets.count { |el| el.status == SecretDetection::Status::BLOB_TIMEOUT }

        case timed_out_blobs
        when 0
          SecretDetection::Status::FOUND
        when found_secrets.length
          SecretDetection::Status::SCAN_TIMEOUT
        else
          SecretDetection::Status::FOUND_WITH_ERRORS
        end
      end
    end
  end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Gitlab
  module SecretDetection
    # All the possible statuses emitted by the Scan operation.
    # These integer codes are used both as the overall Response#status and as
    # the per-blob Finding#status values.
    class Status
      NOT_FOUND = 0 # When scan operation completes with zero findings
      FOUND = 1 # When scan operation completes with one or more findings
      FOUND_WITH_ERRORS = 2 # When scan operation completes with one or more findings along with some errors
      SCAN_TIMEOUT = 3 # When the scan operation runs beyond given time out
      BLOB_TIMEOUT = 4 # When the scan operation on a blob runs beyond given time out
      SCAN_ERROR = 5 # When the scan operation fails due to regex error
      INPUT_ERROR = 6 # When the scan operation fails due to invalid input
    end
  end
end

View File

@ -0,0 +1,49 @@
# This file contains a subset of rules pulled from the original source file.
# Original Source: https://gitlab.com/gitlab-org/security-products/analyzers/secrets/-/blob/master/gitleaks.toml
# Reference: https://gitlab.com/gitlab-org/gitlab/-/issues/427011
title = "gitleaks config"
[[rules]]
id = "gitlab_personal_access_token"
description = "GitLab Personal Access Token"
regex = '''glpat-[0-9a-zA-Z_\-]{20}'''
tags = ["gitlab", "revocation_type"]
keywords = [
"glpat",
]
[[rules]]
id = "gitlab_pipeline_trigger_token"
description = "GitLab Pipeline Trigger Token"
regex = '''glptt-[0-9a-zA-Z_\-]{20}'''
tags = ["gitlab"]
keywords = [
"glptt",
]
[[rules]]
id = "gitlab_runner_registration_token"
description = "GitLab Runner Registration Token"
regex = '''GR1348941[0-9a-zA-Z_\-]{20}'''
tags = ["gitlab"]
keywords = [
"GR1348941",
]
[[rules]]
id = "gitlab_runner_auth_token"
description = "GitLab Runner Authentication Token"
regex = '''glrt-[0-9a-zA-Z_\-]{20}'''
tags = ["gitlab"]
keywords = [
"glrt",
]
[[rules]]
id = "gitlab_feed_token"
description = "GitLab Feed Token"
regex = '''glft-[0-9a-zA-Z_\-]{20}'''
tags = ["gitlab"]
keywords = [
"glft",
]

View File

@ -1,7 +0,0 @@
# frozen_string_literal: true
# Smoke test: the gem must expose a non-nil VERSION constant.
RSpec.describe Gitlab::SecretDetection do
  it "has a version number" do
    expect(Gitlab::SecretDetection::VERSION).not_to be_nil
  end
end

View File

@ -0,0 +1,199 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detection do
  subject(:scan) { described_class.new }

  # Builds a minimal blob double exposing the `id`/`data` interface that
  # Scan#secrets_scan expects from its input.
  def new_blob(id:, data:)
    Struct.new(:id, :data).new(id, data)
  end

  # In-memory subset of the gitleaks ruleset used to stub parse_ruleset.
  let(:ruleset) do
    {
      "title" => "gitleaks config",
      "rules" => [
        { "id" => "gitlab_personal_access_token",
          "description" => "GitLab Personal Access Token",
          "regex" => "glpat-[0-9a-zA-Z_\\-]{20}",
          "tags" => %w[gitlab revocation_type],
          "keywords" => ["glpat"] },
        { "id" => "gitlab_pipeline_trigger_token",
          "description" => "GitLab Pipeline Trigger Token",
          "regex" => "glptt-[0-9a-zA-Z_\\-]{20}",
          "tags" => ["gitlab"],
          "keywords" => ["glptt"] },
        { "id" => "gitlab_runner_registration_token",
          "description" => "GitLab Runner Registration Token",
          "regex" => "GR1348941[0-9a-zA-Z_-]{20}",
          "tags" => ["gitlab"],
          "keywords" => ["GR1348941"] },
        { "id" => "gitlab_feed_token",
          "description" => "GitLab Feed Token",
          "regex" => "glft-[0-9a-zA-Z_-]{20}",
          "tags" => ["gitlab"],
          "keywords" => ["glft"] }
      ]
    }
  end

  it "does not raise an error parsing the toml file" do
    expect { scan }.not_to raise_error
  end

  context "when it creates RE2 patterns from file data" do
    # NOTE(review): referencing `scan` here instantiates the lazy subject, so
    # `parse_ruleset` has already run (against the real gitleaks.toml) before
    # the stub is installed — confirm this stub actually takes effect.
    before do
      allow(scan).to receive(:parse_ruleset).and_return(ruleset)
    end

    it "does not raise an error when building patterns" do
      expect { scan }.not_to raise_error
    end
  end

  context "when matching patterns" do
    before do
      allow(scan).to receive(:parse_ruleset).and_return(ruleset)
    end

    context 'when the blob does not contain a secret' do
      let(:blobs) do
        [
          new_blob(id: 1234, data: "no secrets")
        ]
      end

      it "does not match" do
        expected_response = Gitlab::SecretDetection::Response.new(Gitlab::SecretDetection::Status::NOT_FOUND)

        expect(scan.secrets_scan(blobs)).to eq(expected_response)
      end

      it "attempts to keyword match returning no blobs for further scan" do
        expect(scan).to receive(:filter_by_keywords).with(blobs).and_return([])

        scan.secrets_scan(blobs)
      end

      it "does not attempt to regex match" do
        # NOTE(review): Scan defines `find_secrets_bulk`, not
        # `match_rules_bulk`; this expectation can never be violated —
        # verify the intended method name.
        expect(scan).not_to receive(:match_rules_bulk)

        scan.secrets_scan(blobs)
      end
    end

    context "when multiple blobs contains secrets" do
      let(:blobs) do
        [
          new_blob(id: 111, data: "glpat-12312312312312312312"), # gitleaks:allow
          new_blob(id: 222, data: "\n\nglptt-12312312312312312312"), # gitleaks:allow
          new_blob(id: 333, data: "data with no secret"),
          new_blob(id: 444, data: "GR134894112312312312312312312\nglft-12312312312312312312") # gitleaks:allow
        ]
      end

      it "matches glpat" do
        # One finding per secret occurrence, carrying the blob id, the
        # 1-based line number, and the matching rule's id and description.
        expected_response = Gitlab::SecretDetection::Response.new(
          Gitlab::SecretDetection::Status::FOUND,
          [
            Gitlab::SecretDetection::Finding.new(
              blobs[0].id,
              Gitlab::SecretDetection::Status::FOUND,
              1,
              ruleset['rules'][0]['id'],
              ruleset['rules'][0]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              blobs[1].id,
              Gitlab::SecretDetection::Status::FOUND,
              3,
              ruleset['rules'][1]['id'],
              ruleset['rules'][1]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              blobs[3].id,
              Gitlab::SecretDetection::Status::FOUND,
              1,
              ruleset['rules'][2]['id'],
              ruleset['rules'][2]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              blobs[3].id,
              Gitlab::SecretDetection::Status::FOUND,
              2,
              ruleset['rules'][3]['id'],
              ruleset['rules'][3]['description']
            )
          ]
        )

        expect(scan.secrets_scan(blobs)).to eq(expected_response)
      end
    end

    context "when configured with time out" do
      let(:large_data) do
        ("large data with a secret glpat-12312312312312312312\n" * 10_000_000).freeze # gitleaks:allow
      end

      # NOTE(review): the second and third blobs share id 333 — presumably the
      # second was meant to be 222; confirm.
      let(:blobs) do
        [
          new_blob(id: 111, data: "GR134894112312312312312312312"), # gitleaks:allow
          new_blob(id: 333, data: "data with no secret"),
          new_blob(id: 333, data: large_data)
        ]
      end

      it "whole secret detection scan operation times out" do
        scan_timeout_secs = 0.000_001 # 1 micro-sec to intentionally timeout large blob

        response = Gitlab::SecretDetection::Response.new(Gitlab::SecretDetection::Status::SCAN_TIMEOUT)

        expect(scan.secrets_scan(blobs, timeout: scan_timeout_secs)).to eq(response)
      end

      it "one of the blobs times out while others continue to get scanned" do
        each_blob_timeout_secs = 0.000_001 # 1 micro-sec to intentionally timeout large blob

        expected_response = Gitlab::SecretDetection::Response.new(
          Gitlab::SecretDetection::Status::FOUND_WITH_ERRORS,
          [
            Gitlab::SecretDetection::Finding.new(
              blobs[0].id, Gitlab::SecretDetection::Status::FOUND, 1,
              ruleset['rules'][2]['id'],
              ruleset['rules'][2]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              blobs[2].id, Gitlab::SecretDetection::Status::BLOB_TIMEOUT
            )
          ])

        expect(scan.secrets_scan(blobs, blob_timeout: each_blob_timeout_secs)).to eq(expected_response)
      end

      it "all the blobs time out" do
        each_blob_timeout_secs = 0.000_001 # 1 micro-sec to intentionally timeout large blob

        all_large_blobs = [
          new_blob(id: 111, data: large_data),
          new_blob(id: 222, data: large_data),
          new_blob(id: 333, data: large_data)
        ]

        # scan status changes to SCAN_TIMEOUT when *all* the blobs time out
        expected_scan_status = Gitlab::SecretDetection::Status::SCAN_TIMEOUT

        expected_response = Gitlab::SecretDetection::Response.new(
          expected_scan_status,
          [
            Gitlab::SecretDetection::Finding.new(
              all_large_blobs[0].id, Gitlab::SecretDetection::Status::BLOB_TIMEOUT
            ),
            Gitlab::SecretDetection::Finding.new(
              all_large_blobs[1].id, Gitlab::SecretDetection::Status::BLOB_TIMEOUT
            ),
            Gitlab::SecretDetection::Finding.new(
              all_large_blobs[2].id, Gitlab::SecretDetection::Status::BLOB_TIMEOUT
            )
          ])

        expect(scan.secrets_scan(all_large_blobs, blob_timeout: each_blob_timeout_secs)).to eq(expected_response)
      end
    end
  end
end

View File

@ -3,6 +3,7 @@
module BitbucketServer
class Connection
include ActionView::Helpers::SanitizeHelper
include BitbucketServer::RetryWithDelay
DEFAULT_API_VERSION = '1.0'
SEPARATOR = '/'
@ -31,10 +32,13 @@ module BitbucketServer
end
def get(path, extra_query = {})
response = Gitlab::HTTP.get(build_url(path),
basic_auth: auth,
headers: accept_headers,
query: extra_query)
response = if Feature.enabled?(:bitbucket_server_importer_exponential_backoff)
retry_with_delay do
Gitlab::HTTP.get(build_url(path), basic_auth: auth, headers: accept_headers, query: extra_query)
end
else
Gitlab::HTTP.get(build_url(path), basic_auth: auth, headers: accept_headers, query: extra_query)
end
check_errors!(response)
@ -44,10 +48,13 @@ module BitbucketServer
end
def post(path, body)
response = Gitlab::HTTP.post(build_url(path),
basic_auth: auth,
headers: post_headers,
body: body)
response = if Feature.enabled?(:bitbucket_server_importer_exponential_backoff)
retry_with_delay do
Gitlab::HTTP.post(build_url(path), basic_auth: auth, headers: post_headers, body: body)
end
else
Gitlab::HTTP.post(build_url(path), basic_auth: auth, headers: post_headers, body: body)
end
check_errors!(response)
@ -63,10 +70,13 @@ module BitbucketServer
def delete(resource, path, body)
url = delete_url(resource, path)
response = Gitlab::HTTP.delete(url,
basic_auth: auth,
headers: post_headers,
body: body)
response = if Feature.enabled?(:bitbucket_server_importer_exponential_backoff)
retry_with_delay do
Gitlab::HTTP.delete(url, basic_auth: auth, headers: post_headers, body: body)
end
else
Gitlab::HTTP.delete(url, basic_auth: auth, headers: post_headers, body: body)
end
check_errors!(response)
@ -121,5 +131,9 @@ module BitbucketServer
build_url(path)
end
end
def logger
Gitlab::BitbucketServerImport::Logger
end
end
end

View File

@ -0,0 +1,32 @@
# frozen_string_literal: true
module BitbucketServer
  # Mixin that retries an HTTP request a single time when the server responds
  # with 429 Too Many Requests and a sufficiently short `retry-after` header.
  # Includers must provide a `logger` method.
  module RetryWithDelay
    extend ActiveSupport::Concern

    MAXIMUM_DELAY = 20

    # Executes the block, retrying once if the response was rate limited.
    def retry_with_delay(&block)
      run_retry_with_delay(&block)
    end

    private

    def run_retry_with_delay
      first_response = yield

      return first_response unless first_response.code == 429
      return first_response unless first_response.headers.has_key?('retry-after')

      delay = first_response.headers['retry-after'].to_i

      # Delays longer than MAXIMUM_DELAY are not worth blocking for.
      return first_response if delay > MAXIMUM_DELAY

      logger.info(message: "Retrying in #{delay} seconds due to 429 Too Many Requests")
      sleep delay

      yield
    end
  end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
module Gitlab
  module Instrumentation
    # rubocop:disable Gitlab/ModuleWithInstanceVariables -- this module patches ConnectionPool to instrument it
    module ConnectionPool
      # Captures the pool's name from the :name option (defaulting to
      # 'unknown') before delegating to the patched class's initializer.
      def initialize(options = {}, &block)
        @name = options.fetch(:name, 'unknown')
        super
      end

      # Wraps checkout to publish size and availability gauges every time a
      # connection is checked out, then returns the connection unchanged.
      def checkout(options = {})
        conn = super
        connection_class = conn.class.to_s

        track_available_connections(connection_class)
        track_pool_size(connection_class)

        conn
      end

      # Reports the pool size gauge once per pool instance.
      # NOTE(review): @size and @key appear to be internals of the patched
      # ConnectionPool class — confirm they exist in the pinned gem version.
      def track_pool_size(connection_class)
        # this means that the size metric for this pool key has been sent
        return if @size_gauge

        @size_gauge ||= ::Gitlab::Metrics.gauge(:gitlab_connection_pool_size, 'Size of connection pool', {}, :all)
        @size_gauge.set({ pool_name: @name, pool_key: @key, connection_class: connection_class }, @size)
      end

      # Reports the available-connections gauge; re-set on every checkout
      # since availability changes over the pool's lifetime.
      def track_available_connections(connection_class)
        @available_gauge ||= ::Gitlab::Metrics.gauge(:gitlab_connection_pool_available_count,
          'Number of available connections in the pool', {}, :all)
        @available_gauge.set({ pool_name: @name, pool_key: @key, connection_class: connection_class }, available)
      end
    end
    # rubocop:enable Gitlab/ModuleWithInstanceVariables
  end
end

View File

@ -30,7 +30,7 @@ module Gitlab
end
def pool
@pool ||= ConnectionPool.new(size: pool_size) { redis }
@pool ||= ConnectionPool.new(size: pool_size, name: store_name.underscore) { redis }
end
def pool_size

View File

@ -40277,9 +40277,6 @@ msgstr ""
msgid "Reply to this email directly or %{view_it_on_gitlab}."
msgstr ""
msgid "Reply..."
msgstr ""
msgid "Reply…"
msgstr ""
@ -51541,6 +51538,9 @@ msgstr ""
msgid "Unauthorized to create an environment"
msgstr ""
msgid "Unauthorized to delete a container registry protection rule"
msgstr ""
msgid "Unauthorized to delete a package protection rule"
msgstr ""

View File

@ -60,7 +60,7 @@
"@gitlab/favicon-overlay": "2.0.0",
"@gitlab/fonts": "^1.3.0",
"@gitlab/svgs": "3.72.0",
"@gitlab/ui": "^71.1.1",
"@gitlab/ui": "^71.3.0",
"@gitlab/visual-review-tools": "1.7.3",
"@gitlab/web-ide": "^0.0.1-dev-20231129035648",
"@mattiasbuelens/web-streams-adapter": "^0.1.0",

View File

@ -118,7 +118,7 @@ describe('Abuse Report Add Note', () => {
expect(findReplyTextarea().exists()).toBe(true);
expect(findReplyTextarea().attributes()).toMatchObject({
rows: '1',
placeholder: 'Reply',
placeholder: 'Reply',
'aria-label': 'Reply to comment',
});
});

View File

@ -2,11 +2,12 @@ import { shallowMount } from '@vue/test-utils';
import { escape } from 'lodash';
import ItemTitle from '~/work_items/components/item_title.vue';
const createComponent = ({ title = 'Sample title', disabled = false } = {}) =>
const createComponent = ({ title = 'Sample title', disabled = false, useH1 = false } = {}) =>
shallowMount(ItemTitle, {
propsData: {
title,
disabled,
useH1,
},
});
@ -27,6 +28,12 @@ describe('ItemTitle', () => {
expect(findInputEl().text()).toBe('Sample title');
});
it('renders H1 if useH1 is true, otherwise renders H2', () => {
expect(wrapper.element.tagName).toBe('H2');
wrapper = createComponent({ useH1: true });
expect(wrapper.element.tagName).toBe('H1');
});
it('renders title contents with editing disabled', () => {
wrapper = createComponent({
disabled: true,

View File

@ -9,7 +9,8 @@ import {
describe('Work Item Note Activity Header', () => {
let wrapper;
const findActivityLabelHeading = () => wrapper.find('h3');
const findActivityLabelH2Heading = () => wrapper.find('h2');
const findActivityLabelH3Heading = () => wrapper.find('h3');
const findActivityFilterDropdown = () => wrapper.findByTestId('work-item-filter');
const findActivitySortDropdown = () => wrapper.findByTestId('work-item-sort');
@ -18,6 +19,7 @@ describe('Work Item Note Activity Header', () => {
sortOrder = ASC,
workItemType = 'Task',
discussionFilter = WORK_ITEM_NOTES_FILTER_ALL_NOTES,
useH2 = false,
} = {}) => {
wrapper = shallowMountExtended(WorkItemNotesActivityHeader, {
propsData: {
@ -25,6 +27,7 @@ describe('Work Item Note Activity Header', () => {
sortOrder,
workItemType,
discussionFilter,
useH2,
},
});
};
@ -34,7 +37,18 @@ describe('Work Item Note Activity Header', () => {
});
it('Should have the Activity label', () => {
expect(findActivityLabelHeading().text()).toBe(WorkItemNotesActivityHeader.i18n.activityLabel);
expect(findActivityLabelH3Heading().text()).toBe(
WorkItemNotesActivityHeader.i18n.activityLabel,
);
});
it('Should render an H2 instead of an H3 if useH2 is true', () => {
createComponent();
expect(findActivityLabelH3Heading().exists()).toBe(true);
expect(findActivityLabelH2Heading().exists()).toBe(false);
createComponent({ useH2: true });
expect(findActivityLabelH2Heading().exists()).toBe(true);
expect(findActivityLabelH3Heading().exists()).toBe(false);
});
it('Should have Activity filtering dropdown', () => {

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe BitbucketServer::Client do
RSpec.describe BitbucketServer::Client, feature_category: :importers do
let(:base_uri) { 'https://test:7990/stash/' }
let(:options) { { base_uri: base_uri, user: 'bitbucket', password: 'mypassword' } }
let(:project) { 'SOME-PROJECT' }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe BitbucketServer::Connection do
RSpec.describe BitbucketServer::Connection, feature_category: :importers do
let(:options) { { base_uri: 'https://test:7990', user: 'bitbucket', password: 'mypassword' } }
let(:payload) { { 'test' => 1 } }
let(:headers) { { "Content-Type" => "application/json" } }
@ -11,83 +11,162 @@ RSpec.describe BitbucketServer::Connection do
subject { described_class.new(options) }
describe '#get' do
it 'returns JSON body' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.get(url, { something: 1 })).to eq(payload)
before do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' })
.to_return(body: payload.to_json, status: 200, headers: headers)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
it 'runs with retry_with_delay' do
expect(subject).to receive(:retry_with_delay).and_call_original.once
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
subject.get(url)
end
it 'throws an exception if the response is not JSON' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
shared_examples 'handles get requests' do
it 'returns JSON body' do
expect(subject.get(url, { something: 1 })).to eq(payload)
end
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
it 'throws an exception if the response is not JSON' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
it 'throws an exception upon a network error' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
end
it 'throws an exception upon a network error' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
it_behaves_like 'handles get requests'
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
context 'when the response is a 429 rate limit reached error' do
let(:response) do
instance_double(HTTParty::Response, parsed_response: payload, code: 429, headers: headers.merge('retry-after' => '0'))
end
before do
allow(Gitlab::HTTP).to receive(:get).and_return(response)
end
it 'sleeps, retries and if the error persists it fails' do
expect(Gitlab::BitbucketServerImport::Logger).to receive(:info)
.with(message: 'Retrying in 0 seconds due to 429 Too Many Requests')
.once
expect { subject.get(url) }.to raise_error(BitbucketServer::Connection::ConnectionError)
end
end
context 'when the bitbucket_server_importer_exponential_backoff feature flag is disabled' do
before do
stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
end
it_behaves_like 'handles get requests'
end
end
describe '#post' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
it 'returns JSON body' do
before do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.post(url, payload)).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
it 'runs with retry_with_delay' do
expect(subject).to receive(:retry_with_delay).and_call_original.once
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
subject.post(url, payload)
end
it 'throws an exception upon a network error' do
WebMock.stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
shared_examples 'handles post requests' do
it 'returns JSON body' do
expect(subject.post(url, payload)).to eq(payload)
end
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
end
it 'throws an exception upon a network error' do
WebMock.stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
end
it 'throws an exception if the URI is invalid' do
stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(URI::InvalidURIError)
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
end
end
it 'throws an exception if the URI is invalid' do
stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(URI::InvalidURIError)
it_behaves_like 'handles post requests'
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
context 'when the bitbucket_server_importer_exponential_backoff feature flag is disabled' do
before do
stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
end
it_behaves_like 'handles post requests'
end
end
describe '#delete' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
before do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
end
context 'branch API' do
let(:branch_path) { '/projects/foo/repos/bar/branches' }
let(:branch_url) { 'https://test:7990/rest/branch-utils/1.0/projects/foo/repos/bar/branches' }
let(:path) {}
it 'returns JSON body' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
it 'runs with retry_with_delay' do
expect(subject).to receive(:retry_with_delay).and_call_original.once
expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
subject.delete(:branches, branch_path, payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
shared_examples 'handles delete requests' do
it 'returns JSON body' do
expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
end
expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
end
it 'throws an exception upon a network error' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_raise(OpenSSL::SSL::SSLError)
expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
end
end
it 'throws an exception upon a network error' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_raise(OpenSSL::SSL::SSLError)
it_behaves_like 'handles delete requests'
expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
context 'with the bitbucket_server_importer_exponential_backoff feature flag disabled' do
before do
stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
end
it_behaves_like 'handles delete requests'
end
end
end

View File

@ -0,0 +1,60 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for BitbucketServer::RetryWithDelay — a mixin whose #retry_with_delay
# wraps an HTTP request block and, when the response has status 429, sleeps
# for the 'retry-after' interval and retries the block once more.
RSpec.describe BitbucketServer::RetryWithDelay, feature_category: :importers do
  let(:service) { dummy_class.new }
  let(:body) { 'test' }
  # Happy-path response double: code 200 means no retry is triggered.
  let(:response) { instance_double(HTTParty::Response, body: body, code: 200) }
  # Lambda stands in for the HTTP call so examples can count invocations.
  let(:response_caller) { -> { response } }
  # Minimal host class: the mixin expects the includer to provide #logger.
  let(:dummy_class) do
    Class.new do
      def logger
        @logger ||= Logger.new(File::NULL)
      end
      def dummy_method(response_caller)
        retry_with_delay do
          response_caller.call
        end
      end
      include BitbucketServer::RetryWithDelay
    end
  end
  subject(:execute) { service.dummy_method(response_caller) }
  describe '.retry_with_delay' do
    context 'when the function succeeds on the first try' do
      it 'calls the function once and returns its result' do
        expect(response_caller).to receive(:call).once.and_call_original
        execute
      end
    end
    context 'when the request has a status code of 429' do
      let(:headers) { { 'retry-after' => '0' } }
      let(:body) { 'HTTP Status 429 - Too Many Requests' }
      let(:response) { instance_double(HTTParty::Response, body: body, code: 429, headers: headers) }
      before do
        # Cap the sleep at 0 seconds so the example does not actually wait.
        stub_const("#{described_class}::MAXIMUM_DELAY", 0)
      end
      it 'calls the function again after a delay' do
        # Called twice: the initial 429 response plus one retry.
        expect(response_caller).to receive(:call).twice.and_call_original
        expect_next_instance_of(Logger) do |logger|
          expect(logger).to receive(:info)
            .with(message: 'Retrying in 0 seconds due to 429 Too Many Requests')
            .once
        end
        execute
      end
    end
  end
end

View File

@ -0,0 +1,69 @@
# frozen_string_literal: true
require 'spec_helper'
require 'support/helpers/rails_helpers'
# Specs for Gitlab::Instrumentation::ConnectionPool: on every checkout the
# instrumented pool reports its configured size and the number of currently
# available connections as Prometheus gauges.
RSpec.describe Gitlab::Instrumentation::ConnectionPool, feature_category: :redis do
  let(:option) { { name: 'test', size: 5 } }
  let(:pool) { ConnectionPool.new(option) { 'nothing' } }
  # Exact arguments the instrumentation passes to Gitlab::Metrics.gauge.
  let_it_be(:size_gauge_args) { [:gitlab_connection_pool_size, 'Size of connection pool', {}, :all] }
  let_it_be(:available_gauge_args) do
    [:gitlab_connection_pool_available_count,
      'Number of available connections in the pool', {}, :all]
  end
  subject(:checkout_pool) { pool.checkout }
  describe '.checkout' do
    let(:size_gauge_double) { instance_double(::Prometheus::Client::Gauge) }
    context 'when tracking for the first time' do
      it 'initialises gauges' do
        expect(::Gitlab::Metrics).to receive(:gauge).with(*size_gauge_args).and_call_original
        expect(::Gitlab::Metrics).to receive(:gauge).with(*available_gauge_args).and_call_original
        checkout_pool
      end
    end
    it 'sets the size gauge only once' do
      # The pool size is static, so :set should not be repeated per checkout.
      expect(::Gitlab::Metrics.gauge(*size_gauge_args)).to receive(:set).with(
        { pool_name: 'test', pool_key: anything, connection_class: "String" }, 5).once
      checkout_pool
      checkout_pool
    end
    context 'when tracking on subsequent calls' do
      before do
        pool.checkout # initialise instance variables
      end
      it 'uses memoized gauges' do
        # Gauges were created on the first checkout; only the availability
        # gauge is updated again (4 = size 5 minus the one checked out above).
        expect(::Gitlab::Metrics).not_to receive(:gauge).with(*size_gauge_args)
        expect(::Gitlab::Metrics).not_to receive(:gauge).with(*available_gauge_args)
        expect(pool.instance_variable_get(:@size_gauge)).not_to receive(:set)
          .with({ pool_name: 'test', pool_key: anything, connection_class: "String" }, 5)
        expect(pool.instance_variable_get(:@available_gauge)).to receive(:set)
          .with({ pool_name: 'test', pool_key: anything, connection_class: "String" }, 4)
        checkout_pool
      end
      context 'when pool name is omitted' do
        let(:option) { {} }
        it 'uses unknown name' do
          # Pools created without a :name label fall back to 'unknown'.
          expect(pool.instance_variable_get(:@size_gauge)).not_to receive(:set)
            .with({ pool_name: 'unknown', pool_key: anything, connection_class: "String" }, 5)
          expect(pool.instance_variable_get(:@available_gauge)).to receive(:set)
            .with({ pool_name: 'unknown', pool_key: anything, connection_class: "String" }, 4)
          checkout_pool
        end
      end
    end
  end
end

View File

@ -0,0 +1,102 @@
# frozen_string_literal: true
require 'spec_helper'
# Request specs for the deleteContainerRegistryProtectionRule GraphQL
# mutation: verifies successful deletion for maintainers, authorization
# failures for lower roles, and gating behind the
# :container_registry_protected_containers feature flag.
RSpec.describe 'Deleting a container registry protection rule', :aggregate_failures, feature_category: :container_registry do
  include GraphqlHelpers
  let_it_be(:project) { create(:project, :repository) }
  let_it_be_with_refind(:container_protection_rule) do
    create(:container_registry_protection_rule, project: project)
  end
  let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
  let(:mutation) { graphql_mutation(:delete_container_registry_protection_rule, input) }
  let(:mutation_response) { graphql_mutation_response(:delete_container_registry_protection_rule) }
  let(:input) { { id: container_protection_rule.to_global_id } }
  subject(:post_graphql_mutation_delete_container_registry_protection_rule) do
    post_graphql_mutation(mutation, current_user: current_user)
  end
  # Shared expectations for any request that must not delete anything.
  # (Typo fix: 'reponse' -> 'response'; renamed at every call site below.)
  shared_examples 'an erroneous response' do
    it { post_graphql_mutation_delete_container_registry_protection_rule.tap { expect(mutation_response).to be_blank } }
    it do
      expect { post_graphql_mutation_delete_container_registry_protection_rule }
        .not_to change { ::ContainerRegistry::Protection::Rule.count }
    end
  end
  it_behaves_like 'a working GraphQL mutation'
  it 'responds with deleted container registry protection rule' do
    expect { post_graphql_mutation_delete_container_registry_protection_rule }
      .to change { ::ContainerRegistry::Protection::Rule.count }.from(1).to(0)
    expect_graphql_errors_to_be_empty
    expect(mutation_response).to include(
      'errors' => be_blank,
      'containerRegistryProtectionRule' => {
        'id' => container_protection_rule.to_global_id.to_s,
        'containerPathPattern' => container_protection_rule.container_path_pattern,
        'deleteProtectedUpToAccessLevel' => container_protection_rule.delete_protected_up_to_access_level.upcase,
        'pushProtectedUpToAccessLevel' => container_protection_rule.push_protected_up_to_access_level.upcase
      }
    )
  end
  context 'with existing container registry protection rule belonging to other project' do
    let_it_be(:container_protection_rule) do
      create(:container_registry_protection_rule, container_path_pattern: 'protection_rule_other_project')
    end
    it_behaves_like 'an erroneous response'
    it { is_expected.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
  end
  context 'with deleted container registry protection rule' do
    let!(:container_protection_rule) do
      create(:container_registry_protection_rule, project: project,
        container_path_pattern: 'protection_rule_deleted').destroy!
    end
    it_behaves_like 'an erroneous response'
    it { is_expected.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
  end
  context 'when current_user does not have permission' do
    let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
    let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
    let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
    let_it_be(:anonymous) { create(:user) }
    # Deletion requires at least maintainer; every lower role must be refused.
    where(:current_user) do
      [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
    end
    with_them do
      it_behaves_like 'an erroneous response'
      it { is_expected.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
    end
  end
  context "when feature flag ':container_registry_protected_containers' disabled" do
    before do
      stub_feature_flags(container_registry_protected_containers: false)
    end
    it_behaves_like 'an erroneous response'
    it do
      post_graphql_mutation_delete_container_registry_protection_rule
      expect_graphql_errors_to_include(/'container_registry_protected_containers' feature flag is disabled/)
    end
  end
end

View File

@ -0,0 +1,106 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for ContainerRegistry::Protection::DeleteRuleService#execute:
# deletes a protection rule for authorized users, returns an error
# ServiceResponse otherwise, and raises ArgumentError on missing inputs.
RSpec.describe ContainerRegistry::Protection::DeleteRuleService, '#execute', feature_category: :container_registry do
  let_it_be(:project) { create(:project) }
  let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
  let_it_be_with_refind(:container_registry_protection_rule) do
    create(:container_registry_protection_rule, project: project)
  end
  subject(:service_execute) do
    described_class.new(container_registry_protection_rule, current_user: current_user).execute
  end
  # Expectations for a successful deletion: success response carrying the
  # deleted rule, and the record gone from the database.
  shared_examples 'a successful service response' do
    it { is_expected.to be_success }
    it do
      is_expected.to have_attributes(
        errors: be_blank,
        message: be_blank,
        payload: { container_registry_protection_rule: container_registry_protection_rule }
      )
    end
    it do
      service_execute
      expect { container_registry_protection_rule.reload }.to raise_error ActiveRecord::RecordNotFound
    end
  end
  # Expectations for a refused deletion: error response and the record intact.
  shared_examples 'an erroneous service response' do
    it { is_expected.to be_error }
    it do
      is_expected.to have_attributes(message: be_present, payload: { container_registry_protection_rule: be_blank })
    end
    it do
      expect { service_execute }.not_to change { ContainerRegistry::Protection::Rule.count }
      expect { container_registry_protection_rule.reload }.not_to raise_error
    end
  end
  it_behaves_like 'a successful service response'
  it 'deletes the container registry protection rule in the database' do
    expect { service_execute }
      .to change {
        project.reload.container_registry_protection_rules
      }.from([container_registry_protection_rule]).to([])
      .and change { ::ContainerRegistry::Protection::Rule.count }.from(1).to(0)
  end
  context 'with deleted container registry protection rule' do
    # destroy! makes the rule already-deleted; the service still succeeds
    # because destroying a destroyed record is a no-op for it.
    let!(:container_registry_protection_rule) do
      create(:container_registry_protection_rule, project: project,
        container_path_pattern: 'protection_rule_deleted').destroy!
    end
    it_behaves_like 'a successful service response'
  end
  context 'when error occurs during delete operation' do
    before do
      allow(container_registry_protection_rule).to receive(:destroy!).and_raise(StandardError.new('Some error'))
    end
    it_behaves_like 'an erroneous service response'
    it { is_expected.to have_attributes message: /Some error/ }
  end
  context 'when current_user does not have permission' do
    let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
    let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
    let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
    let_it_be(:anonymous) { create(:user) }
    # Deletion requires at least maintainer; every lower role must be refused.
    where(:current_user) do
      [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
    end
    with_them do
      it_behaves_like 'an erroneous service response'
      it { is_expected.to have_attributes message: /Unauthorized to delete a container registry protection rule/ }
    end
  end
  context 'without container registry protection rule' do
    let(:container_registry_protection_rule) { nil }
    it { expect { service_execute }.to raise_error(ArgumentError) }
  end
  context 'without current_user' do
    let(:current_user) { nil }
    let(:container_registry_protection_rule) { build_stubbed(:container_registry_protection_rule, project: project) }
    it { expect { service_execute }.to raise_error(ArgumentError) }
  end
end

View File

@ -223,7 +223,8 @@ RSpec.shared_examples "redis_shared_examples" do
end
it 'instantiates a connection pool with size 5' do
expect(ConnectionPool).to receive(:new).with(size: 5).and_call_original
expect(ConnectionPool).to receive(:new)
.with(size: 5, name: described_class.store_name.underscore).and_call_original
described_class.with { |_redis_shared_example| true }
end
@ -236,7 +237,8 @@ RSpec.shared_examples "redis_shared_examples" do
end
it 'instantiates a connection pool with a size based on the concurrency of the worker' do
expect(ConnectionPool).to receive(:new).with(size: 18 + 5).and_call_original
expect(ConnectionPool).to receive(:new)
.with(size: 18 + 5, name: described_class.store_name.underscore).and_call_original
described_class.with { |_redis_shared_example| true }
end

View File

@ -1274,10 +1274,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.72.0.tgz#5daaa7366913b52ea89439305067e030f967c8a5"
integrity sha512-VbSdwXxu9Y6NAXNFTROjZa83e2b8QeDAO7byqjJ0z+2Y3gGGXdw+HclAzz0Ns8B0+DMV5mV7dtmTlv/1xAXXYQ==
"@gitlab/ui@^71.1.1":
version "71.1.1"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-71.1.1.tgz#3853fc98287736992aae2464de8ba0f482a68f27"
integrity sha512-yhKjn0TJ5kI+If3T5mSQfmWkhXtzFWjr4+Qi6FTN9f3vJTOYXtzsFXtZr66V8202peJWbif4A5KpoNZIJdo8YQ==
"@gitlab/ui@^71.3.0":
version "71.4.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-71.4.0.tgz#481d594f7cdc01aac6529cc7c801221ccde13b86"
integrity sha512-6ddhlYo5wVQJ2j0AhlrmxwBpYS7UhM6sR3XeXeMRbDqJaA/17ARwyl8JMxCqVcIcGbTmDd9FJluXzObQsyUzUQ==
dependencies:
"@floating-ui/dom" "1.2.9"
bootstrap-vue "2.23.1"