Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-06-03 15:09:26 +00:00
parent c74f702c74
commit f5f6cb45c7
63 changed files with 2273 additions and 548 deletions

View File

@ -12,9 +12,9 @@ import {
TAB_QUERY_PARAM_VALUES,
MEMBER_STATE_AWAITING,
MEMBER_STATE_ACTIVE,
USER_STATE_BLOCKED_PENDING_APPROVAL,
BADGE_LABELS_AWAITING_USER_SIGNUP,
BADGE_LABELS_PENDING_OWNER_ACTION,
USER_STATE_BLOCKED,
BADGE_LABELS_AWAITING_SIGNUP,
BADGE_LABELS_PENDING,
} from '../../constants';
import RemoveGroupLinkModal from '../modals/remove_group_link_modal.vue';
import RemoveMemberModal from '../modals/remove_member_modal.vue';
@ -162,7 +162,7 @@ export default {
);
},
/**
* Returns whether the user is awaiting root approval
* Returns whether the user is blocked awaiting root approval
*
* This checks User.state exposed via MemberEntity
*
@ -170,11 +170,11 @@ export default {
* @see {@link ~/app/serializers/member_entity.rb}
* @returns {boolean}
*/
isUserPendingRootApproval(memberInviteMetadata) {
return memberInviteMetadata?.userState === USER_STATE_BLOCKED_PENDING_APPROVAL;
isUserBlocked(memberInviteMetadata) {
return memberInviteMetadata?.userState === USER_STATE_BLOCKED;
},
/**
* Returns whether the member is awaiting owner action
* Returns whether the member is awaiting state
*
* This checks Member.state exposed via MemberEntity
*
@ -183,16 +183,13 @@ export default {
* @see {@link ~/app/serializers/member_entity.rb}
* @returns {boolean}
*/
isMemberPendingOwnerAction(memberState) {
isMemberAwaiting(memberState) {
return memberState === MEMBER_STATE_AWAITING;
},
isUserAwaiting(memberInviteMetadata, memberState) {
return (
this.isUserPendingRootApproval(memberInviteMetadata) ||
this.isMemberPendingOwnerAction(memberState)
);
return this.isUserBlocked(memberInviteMetadata) || this.isMemberAwaiting(memberState);
},
shouldAddPendingOwnerActionBadge(memberInviteMetadata, memberState) {
shouldAddPendingBadge(memberInviteMetadata, memberState) {
return (
this.isUserAwaiting(memberInviteMetadata, memberState) &&
!this.isNewUser(memberInviteMetadata)
@ -209,11 +206,11 @@ export default {
*/
inviteBadge(memberInviteMetadata, memberState) {
if (this.isNewUser(memberInviteMetadata, memberState)) {
return BADGE_LABELS_AWAITING_USER_SIGNUP;
return BADGE_LABELS_AWAITING_SIGNUP;
}
if (this.shouldAddPendingOwnerActionBadge(memberInviteMetadata, memberState)) {
return BADGE_LABELS_PENDING_OWNER_ACTION;
if (this.shouldAddPendingBadge(memberInviteMetadata, memberState)) {
return BADGE_LABELS_PENDING;
}
return '';

View File

@ -160,7 +160,7 @@ export const TAB_QUERY_PARAM_VALUES = {
* This user state value comes from the User model
* see the state machine in app/models/user.rb
*/
export const USER_STATE_BLOCKED_PENDING_APPROVAL = 'blocked_pending_approval';
export const USER_STATE_BLOCKED = 'blocked_pending_approval';
/**
* This and following member state constants' values
@ -170,8 +170,8 @@ export const MEMBER_STATE_CREATED = 0;
export const MEMBER_STATE_AWAITING = 1;
export const MEMBER_STATE_ACTIVE = 2;
export const BADGE_LABELS_AWAITING_USER_SIGNUP = __('Awaiting user signup');
export const BADGE_LABELS_PENDING_OWNER_ACTION = __('Pending owner action');
export const BADGE_LABELS_AWAITING_SIGNUP = __('Awaiting user signup');
export const BADGE_LABELS_PENDING = __('Pending owner action');
export const DAYS_TO_EXPIRE_SOON = 7;

View File

@ -146,12 +146,10 @@ export default {
</gl-sprintf>
</gl-form-checkbox>
</gl-form-group>
<div
class="gl-display-flex gl-justify-content-space-between gl-p-5 gl-bg-gray-10 gl-border-t-gray-100 gl-border-t-solid gl-border-t-1"
>
<div class="gl-display-flex gl-py-5 gl-border-t-gray-100 gl-border-t-solid gl-border-t-1">
<gl-button
type="submit"
class="js-no-auto-disable"
class="js-no-auto-disable gl-mr-3"
category="primary"
variant="confirm"
data-qa-selector="commit_changes_button"

View File

@ -18,6 +18,8 @@ export default {
GlTab,
GlIntersperse,
RunnerDetail,
RunnerMaintenanceNoteDetail: () =>
import('ee_component/runner/components/runner_maintenance_note_detail.vue'),
RunnerGroups,
RunnerProjects,
RunnerJobs,
@ -106,6 +108,11 @@ export default {
/>
</template>
</runner-detail>
<runner-maintenance-note-detail
class="gl-pt-4 gl-border-t-gray-100 gl-border-t-1 gl-border-t-solid"
:value="runner.maintenanceNoteHtml"
/>
</dl>
</div>

View File

@ -31,6 +31,8 @@ export default {
GlFormGroup,
GlFormInputGroup,
GlSkeletonLoader,
RunnerMaintenanceNoteField: () =>
import('ee_component/runner/components/runner_maintenance_note_field.vue'),
RunnerUpdateCostFactorFields: () =>
import('ee_component/runner/components/runner_update_cost_factor_fields.vue'),
},
@ -115,9 +117,13 @@ export default {
<h4 class="gl-font-lg gl-my-5">{{ s__('Runners|Details') }}</h4>
<gl-skeleton-loader v-if="loading" />
<gl-form-group v-else :label="__('Description')" data-testid="runner-field-description">
<gl-form-input-group v-model="model.description" />
</gl-form-group>
<template v-else>
<gl-form-group :label="__('Description')" data-testid="runner-field-description">
<gl-form-input-group v-model="model.description" />
</gl-form-group>
<runner-maintenance-note-field v-model="model.maintenanceNote" />
</template>
<hr />

View File

@ -1,41 +1,7 @@
#import "ee_else_ce/runner/graphql/show/runner_details.fragment.graphql"
query getRunner($id: CiRunnerID!) {
runner(id: $id) {
__typename
id
shortSha
runnerType
active
accessLevel
runUntagged
locked
ipAddress
executorName
architectureName
platformName
description
maximumTimeout
jobCount
tagList
createdAt
status(legacyMode: null)
contactedAt
version
editAdminUrl
userPermissions {
updateRunner
deleteRunner
}
groups {
# Only a single group can be loaded here, while projects
# are loaded separately using the query with pagination
# parameters `runner_projects.query.graphql`.
nodes {
id
avatarUrl
name
fullName
webUrl
}
}
...RunnerDetails
}
}

View File

@ -0,0 +1,5 @@
#import "./runner_details_shared.fragment.graphql"
fragment RunnerDetails on CiRunner {
...RunnerDetailsShared
}

View File

@ -0,0 +1,39 @@
fragment RunnerDetailsShared on CiRunner {
__typename
id
shortSha
runnerType
active
accessLevel
runUntagged
locked
ipAddress
executorName
architectureName
platformName
description
maximumTimeout
jobCount
tagList
createdAt
status(legacyMode: null)
contactedAt
version
editAdminUrl
userPermissions {
updateRunner
deleteRunner
}
groups {
# Only a single group can be loaded here, while projects
# are loaded separately using the query with pagination
# parameters `runner_projects.query.graphql`.
nodes {
id
avatarUrl
name
fullName
webUrl
}
}
}

View File

@ -101,6 +101,8 @@ export default {
this.$emit('copy');
},
handleFormInputCopy(event) {
this.handleCopyButtonClick();
if (this.computedValueIsVisible) {
return;
}

View File

@ -557,6 +557,10 @@ table.code {
left: 0;
}
}
img {
max-width: 100%;
}
}
}

View File

@ -98,3 +98,5 @@ class Admin::RunnersController < Admin::ApplicationController
end
# rubocop: enable CodeReuse/ActiveRecord
end
Admin::RunnersController.prepend_mod

View File

@ -2,6 +2,8 @@
module Resolvers
class UserResolver < BaseResolver
include Gitlab::Graphql::Authorize::AuthorizeResource
description 'Retrieve a single user'
type Types::UserType, null: true
@ -23,6 +25,8 @@ module Resolvers
end
def resolve(id: nil, username: nil)
authorize!
if id
GitlabSchema.object_from_id(id, expected_type: User)
else
@ -39,5 +43,11 @@ module Resolvers
end
end
end
def authorize!
return unless Feature.enabled?(:require_auth_for_graphql_user_resolver)
raise_resource_not_available_error! unless context[:current_user].present?
end
end
end

View File

@ -47,8 +47,12 @@ module Resolvers
end
def authorize!(usernames)
authorized = Ability.allowed?(context[:current_user], :read_users_list)
authorized &&= usernames.present? if context[:current_user].blank?
if Feature.enabled?(:require_auth_for_graphql_user_resolver)
authorized = context[:current_user].present?
else
authorized = Ability.allowed?(context[:current_user], :read_users_list)
authorized &&= usernames.present? if context[:current_user].blank?
end
raise_resource_not_available_error! unless authorized
end

View File

@ -4,6 +4,8 @@ module Types
class ProjectType < BaseObject
graphql_name 'Project'
connection_type_class(Types::CountableConnectionType)
authorize :read_project
expose_permissions Types::PermissionTypes::Project

View File

@ -1109,7 +1109,7 @@ module Ci
end
def coverage_reports
Gitlab::Ci::Reports::CoverageReports.new.tap do |coverage_reports|
Gitlab::Ci::Reports::CoverageReport.new.tap do |coverage_reports|
latest_report_builds(Ci::JobArtifact.coverage_reports).includes(:project).find_each do |build|
build.collect_coverage_reports!(coverage_reports)
end

View File

@ -24,6 +24,7 @@ module PgFullTextSearchable
LONG_WORDS_REGEX = %r([A-Za-z0-9+/@]{50,}).freeze
TSVECTOR_MAX_LENGTH = 1.megabyte.freeze
TEXT_SEARCH_DICTIONARY = 'english'
URL_SCHEME_REGEX = %r{(?<=\A|\W)\w+://(?=\w+)}.freeze
def update_search_data!
tsvector_sql_nodes = self.class.pg_full_text_searchable_columns.map do |column, weight|
@ -104,6 +105,10 @@ module PgFullTextSearchable
def pg_full_text_search(search_term)
search_data_table = reflect_on_association(:search_data).klass.arel_table
# This fixes an inconsistency with how to_tsvector and websearch_to_tsquery process URLs
# See https://gitlab.com/gitlab-org/gitlab/-/issues/354784#note_905431920
search_term = remove_url_scheme(search_term)
joins(:search_data).where(
Arel::Nodes::InfixOperation.new(
'@@',
@ -115,5 +120,11 @@ module PgFullTextSearchable
)
)
end
private
def remove_url_scheme(search_term)
search_term.gsub(URL_SCHEME_REGEX, '')
end
end
end

View File

@ -4,9 +4,9 @@ class NoteSummary
attr_reader :note
attr_reader :metadata
def initialize(noteable, project, author, body, action: nil, commit_count: nil)
def initialize(noteable, project, author, body, action: nil, commit_count: nil, created_at: nil)
@note = { noteable: noteable,
created_at: noteable.system_note_timestamp,
created_at: created_at || noteable.system_note_timestamp,
project: project, author: author, note: body }
@metadata = { action: action, commit_count: commit_count }.compact

View File

@ -2,6 +2,18 @@
module SystemNotes
class IssuablesService < ::SystemNotes::BaseService
# We create cross-referenced system notes when a commit relates to an issue.
# There are two options what time to use for the system note:
# 1. The push date (default)
# 2. The commit date
#
# The commit date is useful when an existing Git repository is imported to GitLab.
# It helps to preserve an original order of all notes (comments, commits, status changes, e.t.c)
# in the imported issues. Otherwise, all commits will be linked before or after all other imported notes.
#
# See also the discussion in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60700#note_612724683
USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE = false
#
# noteable_ref - Referenced noteable object
#
@ -216,7 +228,8 @@ module SystemNotes
)
else
track_cross_reference_action
create_note(NoteSummary.new(noteable, noteable.project, author, body, action: 'cross_reference'))
created_at = mentioner.created_at if USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE && mentioner.is_a?(Commit)
create_note(NoteSummary.new(noteable, noteable.project, author, body, action: 'cross_reference', created_at: created_at))
end
end

View File

@ -0,0 +1,8 @@
---
name: require_auth_for_graphql_user_resolver
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/88020
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/362953
milestone: '15.1'
type: development
group: group::authentication and authorization
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: use_primary_and_secondary_stores_for_duplicate_jobs
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/
rollout_issue_url:
milestone:
type: development
group: group::scalability
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: use_primary_store_as_default_for_duplicate_jobs
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/
rollout_issue_url:
milestone:
type: development
group: group::scalability
default_enabled: false

View File

@ -19,3 +19,4 @@ Gitlab::Redis::SharedState.with { nil }
Gitlab::Redis::TraceChunks.with { nil }
Gitlab::Redis::RateLimiting.with { nil }
Gitlab::Redis::Sessions.with { nil }
Gitlab::Redis::DuplicateJobs.with { nil }

View File

@ -7779,6 +7779,7 @@ The connection type for [`Project`](#project).
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="projectconnectioncount"></a>`count` | [`Int!`](#int) | Total count of collection. |
| <a id="projectconnectionedges"></a>`edges` | [`[ProjectEdge]`](#projectedge) | A list of edges. |
| <a id="projectconnectionnodes"></a>`nodes` | [`[Project]`](#project) | A list of nodes. |
| <a id="projectconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |

View File

@ -503,6 +503,6 @@ View the production Sidekiq log and filter for:
- `json.meta.caller_id: <MyBackgroundMigrationSchedulingMigrationClassName>`
- `json.args: <MyBackgroundMigrationClassName>`
Looking at the `json.error_class`, `json.error_message` and `json.error_backtrace` values may be helpful in understanding why the jobs failed.
Looking at the `json.exception.class`, `json.exception.message`, `json.exception.backtrace`, and `json.exception.sql` values may be helpful in understanding why the jobs failed.
Depending on when and how the failure occurred, you may find other helpful information by filtering with `json.class: <MyBackgroundMigrationClassName>`.

View File

@ -41,12 +41,18 @@ Use the following template to help you get started. Be sure to list any
required attributes first in the table.
````markdown
## Descriptive title
## API name
> Version history note.
One or two sentence description of what endpoint does.
### Method title
> Version history note.
Description of the method.
```plaintext
METHOD /endpoint
```
@ -83,8 +89,40 @@ Example response:
```
````
Adjust the [version history note accordingly](versions.md#add-a-version-history-item)
to describe the GitLab release that introduced the API call.
## Version history
Add [version history](versions.md#documenting-version-specific-features)
to describe new or updated API calls.
To add version history for an individual attribute, include it in the version history
for the section. For example:
```markdown
### Edit a widget
> `widget_message` [introduced](<link-to-issue>) in GitLab 14.3.
```
## Attribute deprecation
To deprecate an attribute:
1. Add a version history note.
```markdown
> - `widget_name` [deprecated](<link-to-issue>) in GitLab 14.7.
```
1. Add inline deprecation text to the description.
```markdown
| Attribute | Type | Required | Description |
|:--------------|:-------|:-----------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------|
| `widget_name` | string | **{dotted-circle}** No | [Deprecated](<link-to-issue>) in GitLab 14.7 and is planned for removal in 15.4. Use `widget_id` instead. The name of the widget. |
```
1. Optional. To widely announce the change, or if it's a breaking change,
[update the deprecations and removals documentation](../deprecation_guidelines/#update-the-deprecations-and-removals-documentation).
## Method description

View File

@ -224,6 +224,16 @@ actors.
Feature.enabled?(:some_feature, group)
```
Multiple actors can be passed together in a comma-separated form:
```shell
/chatops run feature set --project=gitlab-org/gitlab,example-org/example-project some_feature true
/chatops run feature set --group=gitlab-org,example-org some_feature true
/chatops run feature set --namespace=gitlab-org,example-org some_feature true
```
Lastly, to verify that the feature is deemed stable in as many cases as possible,
you should fully roll out the feature by enabling the flag **globally** by running:

View File

@ -179,9 +179,12 @@ bin/feature-flag use_primary_store_as_default_for_foo
By enabling `use_primary_and_secondary_stores_for_foo` feature flag, our `Gitlab::Redis::Foo` will use `MultiStore` to write to both new Redis instance
and the [old (fallback-instance)](#fallback-instance).
If we fail to fetch data from the new instance, we will fallback and read from the old Redis instance.
We can monitor logs for `Gitlab::Redis::MultiStore::ReadFromPrimaryError`, and also the Prometheus counter `gitlab_redis_multi_store_read_fallback_total`.
Once we stop seeing them, this means that we are no longer relying on the data stored on the old Redis store.
For pipelined commands (`pipelined` and `multi`), we execute the entire operation in both stores and then compare the results. If they differ, we emit a
`Gitlab::Redis::MultiStore:PipelinedDiffError` error, and track it in the `gitlab_redis_multi_store_pipelined_diff_error_total` Prometheus counter.
Once we stop seeing those errors, this means that we are no longer relying on the data stored on the old Redis store.
At this point, we are probably safe to move the traffic to the new Redis store.
By enabling `use_primary_store_as_default_for_foo` feature flag, the `MultiStore` will use `primary_store` (new instance) as default Redis store.
@ -213,6 +216,15 @@ MultiStore implements read and write Redis commands separately.
- `del`
- `pipelined`
- `flushdb`
- `rpush`
##### Pipelined commands
**NOTE:** The Ruby block passed to these commands will be executed twice, once per each store.
Thus, excluding the Redis operations performed, the block should be idempotent.
- `pipelined`
- `multi`
When a command outside of the supported list is used, `method_missing` will pass it to the old Redis instance and keep track of it.
This ensures that anything unexpected behaves like it would before.
@ -223,17 +235,19 @@ a developer will need to add an implementation for missing Redis commands before
##### Errors
| error | message |
|-------------------------------------------------|-----------------------------------------------------------------------|
| error | message |
|---------------------------------------------------|---------------------------------------------------------------------------------------------|
| `Gitlab::Redis::MultiStore::ReadFromPrimaryError` | Value not found on the Redis primary store. Read from the Redis secondary store successful. |
| `Gitlab::Redis::MultiStore::MethodMissingError` | Method missing. Falling back to execute method on the Redis secondary store. |
| `Gitlab::Redis::MultiStore::PipelinedDiffError` | Pipelined command executed on both stores successfully but results differ between them. |
| `Gitlab::Redis::MultiStore::MethodMissingError` | Method missing. Falling back to execute method on the Redis secondary store. |
##### Metrics
| metrics name | type | labels | description |
|-------------------------------------------------|--------------------|------------------------|----------------------------------------------------|
| `gitlab_redis_multi_store_read_fallback_total` | Prometheus Counter | command, instance_name | Client side Redis MultiStore reading fallback total|
| `gitlab_redis_multi_store_method_missing_total` | Prometheus Counter | command, instance_name | Client side Redis MultiStore method missing total |
| metrics name | type | labels | description |
|-------------------------------------------------------|--------------------|------------------------|--------------------------------------------------------|
| `gitlab_redis_multi_store_read_fallback_total` | Prometheus Counter | command, instance_name | Client side Redis MultiStore reading fallback total |
| `gitlab_redis_multi_store_pipelined_diff_error_total` | Prometheus Counter | command, instance_name | Redis MultiStore pipelined command diff between stores |
| `gitlab_redis_multi_store_method_missing_total` | Prometheus Counter | command, instance_name | Client side Redis MultiStore method missing total |
## Step 4: clean up after the migration

View File

@ -142,7 +142,9 @@ To restrict visibility levels for projects, snippets, and selected pages:
1. In the **Restricted visibility levels** section, select the desired visibility levels to restrict.
If you restrict the **Public** level:
- User profiles are only visible to logged in users via the Web interface.
- User attributes are only visible to authenticated users via the GraphQL API.
- User attributes via the GraphQL API are:
- Not visible in [GitLab 15.1 and later](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/88020).
- Only visible to authenticated users between [GitLab 13.1](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/33195) and GitLab 15.0.
1. Select **Save changes**.
For more details on project visibility, see

View File

@ -40,7 +40,9 @@ including:
## Group webhooks **(PREMIUM)**
You can configure a group webhook, which is triggered by events
that occur across all projects in the group.
that occur across all projects in the group. If you configure identical webhooks
in a group and a project, they are both triggered by an event in the
project.
Group webhooks can also be configured to listen for events that are
specific to a group, including:
@ -171,7 +173,8 @@ work you must have Ruby installed.
ruby print_http_body.rb 8000
```
1. In GitLab, add your webhook receiver as `http://my.host:8000/`.
1. In GitLab, [configure the webhook](#configure-a-webhook-in-gitlab) and add your
receiver's URL, for example, `http://receiver.example.com:8000/`.
1. Select **Test**. You should see something like this in the console:

View File

@ -224,7 +224,7 @@ This alternative ensures the compliance pipeline does not re-start the parent pi
## Configure project visibility, features, and permissions
To configure project permissions:
To configure visibility, features, and permissions for a project:
1. On the top bar, select **Menu > Projects** and find your project.
1. On the left sidebar, select **Settings > General**.
@ -234,27 +234,6 @@ To configure project permissions:
1. Use the [toggles](#project-feature-settings) to enable or disable features in the project.
1. Select **Save changes**.
When you disable a feature, the following additional features are disabled:
- If you disable the **Issues** feature, project users cannot use:
- **Issue Boards**
- **Service Desk**
- Project users can still access **Milestones** from merge requests.
- If you disable **Issues** and **Merge Requests**, project users cannot use:
- **Labels**
- **Milestones**
- If you disable **Repository**, project users cannot access:
- **Merge requests**
- **CI/CD**
- **Container Registry**
- **Git Large File Storage**
- **Packages**
- Metrics dashboard access requires reading project environments and deployments.
Users with access to the metrics dashboard can also access environments and deployments.
### Project feature settings
Use the toggles to enable or disable features in the project.
@ -278,6 +257,27 @@ Use the toggles to enable or disable features in the project.
| **Operations** | ✓ | Control access to Operations-related features, including [Operations Dashboard](../../../operations/index.md), [Environments and Deployments](../../../ci/environments/index.md), [Feature Flags](../../../operations/feature_flags.md). |
| **Metrics Dashboard** | ✓ | Control access to [metrics dashboard](../integrations/prometheus.md). |
When you disable a feature, the following additional features are also disabled:
- If you disable the **Issues** feature, project users cannot use:
- **Issue Boards**
- **Service Desk**
- Project users can still access **Milestones** from merge requests.
- If you disable **Issues** and **Merge Requests**, project users cannot use:
- **Labels**
- **Milestones**
- If you disable **Repository**, project users cannot access:
- **Merge requests**
- **CI/CD**
- **Container Registry**
- **Git Large File Storage**
- **Packages**
- Metrics dashboard access requires reading project environments and deployments.
Users with access to the metrics dashboard can also access environments and deployments.
## Disabling the CVE ID request button **(FREE SAAS)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41203) in GitLab 13.4, only for public projects on GitLab.com.

View File

@ -3,7 +3,7 @@
module Gitlab
module Ci
module Reports
class CoverageReports
class CoverageReport
attr_reader :files
def initialize

View File

@ -10,7 +10,7 @@ module Gitlab
attr_reader :marker_ranges
attr_writer :text, :rich_text
attr_accessor :index, :old_pos, :new_pos, :line_code, :type
attr_accessor :index, :old_pos, :new_pos, :line_code, :type, :embedded_image
def initialize(text, type, index, old_pos, new_pos, parent_file: nil, line_code: nil, rich_text: nil)
@text = text

View File

@ -14,6 +14,7 @@ module Gitlab
LOG_IPYNBDIFF_TIMEOUT = 'IPYNB_DIFF_TIMEOUT'
LOG_IPYNBDIFF_INVALID = 'IPYNB_DIFF_INVALID'
LOG_IPYNBDIFF_TRUNCATED = 'IPYNB_DIFF_TRUNCATED'
EMBEDDED_IMAGE_PATTERN = ' ![](data:image'
attr_reader :source_diff
@ -69,7 +70,6 @@ module Gitlab
Timeout.timeout(timeout_time) do
IpynbDiff.diff(source_diff.old_blob&.data, source_diff.new_blob&.data,
raise_if_invalid_nb: true,
hide_images: true,
diffy_opts: { include_diff_info: true })&.tap do
log_event(LOG_IPYNBDIFF_GENERATED)
end
@ -109,6 +109,9 @@ module Gitlab
line.type = "#{line.type || 'unchanged'}-nomappinginraw" unless addition_line_maps[line.new_pos] || removal_line_maps[line.old_pos]
line.line_code = line_code(line)
line.rich_text = image_as_rich_text(line)
line
end
@ -152,6 +155,18 @@ module Gitlab
Gitlab::ErrorTracking.log_exception(error) if error
nil
end
def image_as_rich_text(line)
# Strip the initial +, -, or space for the diff context
line_text = line.text[1..]
if line_text.starts_with?(EMBEDDED_IMAGE_PATTERN)
image_body = line_text.delete_prefix(EMBEDDED_IMAGE_PATTERN).delete_suffix(')')
"<img src=\"data:image#{CGI.escapeHTML(image_body)}\">".html_safe
else
line.rich_text
end
end
end
end
end

View File

@ -74,3 +74,5 @@ module Gitlab
end
end
end
Gitlab::GitAccessProject.prepend_mod_with('Gitlab::GitAccessProject')

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
module Gitlab
module Redis
# Pseudo-store to transition `Gitlab::SidekiqMiddleware::DuplicateJobs` from
# using `Sidekiq.redis` to using the `SharedState` redis store.
class DuplicateJobs < ::Gitlab::Redis::Wrapper
class << self
def store_name
'SharedState'
end
private
def redis
primary_store = ::Redis.new(Gitlab::Redis::SharedState.params)
secondary_store = ::Redis.new(Gitlab::Redis::Queues.params)
MultiStore.new(primary_store, secondary_store, name.demodulize)
end
end
end
end
end

View File

@ -0,0 +1,297 @@
# frozen_string_literal: true
module Gitlab
module Redis
class MultiStore
include Gitlab::Utils::StrongMemoize
class ReadFromPrimaryError < StandardError
def message
'Value not found on the redis primary store. Read from the redis secondary store successful.'
end
end
class PipelinedDiffError < StandardError
def message
'Pipelined command executed on both stores successfully but results differ between them.'
end
end
class MethodMissingError < StandardError
def message
'Method missing. Falling back to execute method on the redis secondary store.'
end
end
attr_reader :primary_store, :secondary_store, :instance_name
FAILED_TO_READ_ERROR_MESSAGE = 'Failed to read from the redis primary_store.'
FAILED_TO_WRITE_ERROR_MESSAGE = 'Failed to write to the redis primary_store.'
FAILED_TO_RUN_PIPELINE = 'Failed to execute pipeline on the redis primary_store.'
SKIP_LOG_METHOD_MISSING_FOR_COMMANDS = %i(info).freeze
READ_COMMANDS = %i(
get
mget
smembers
scard
).freeze
WRITE_COMMANDS = %i(
set
setnx
setex
sadd
srem
del
flushdb
rpush
).freeze
PIPELINED_COMMANDS = %i(
pipelined
multi
).freeze
# To transition between two Redis store, `primary_store` should be the target store,
# and `secondary_store` should be the current store. Transition is controlled with feature flags:
#
# - At the default state, all read and write operations are executed in the secondary instance.
# - Turning use_primary_and_secondary_stores_for_<instance_name> on: The store writes to both instances.
# The read commands are executed in primary, but fallback to secondary.
# Other commands are executed in the the default instance (Secondary).
# - Turning use_primary_store_as_default_for_<instance_name> on: The behavior is the same as above,
# but other commands are executed in the primary now.
# - Turning use_primary_and_secondary_stores_for_<instance_name> off: commands are executed in the primary store.
def initialize(primary_store, secondary_store, instance_name)
@primary_store = primary_store
@secondary_store = secondary_store
@instance_name = instance_name
validate_stores!
end
# rubocop:disable GitlabSecurity/PublicSend
READ_COMMANDS.each do |name|
define_method(name) do |*args, &block|
if use_primary_and_secondary_stores?
read_command(name, *args, &block)
else
default_store.send(name, *args, &block)
end
end
end
WRITE_COMMANDS.each do |name|
define_method(name) do |*args, **kwargs, &block|
if use_primary_and_secondary_stores?
write_command(name, *args, **kwargs, &block)
else
default_store.send(name, *args, **kwargs, &block)
end
end
end
PIPELINED_COMMANDS.each do |name|
define_method(name) do |*args, **kwargs, &block|
if use_primary_and_secondary_stores?
pipelined_both(name, *args, **kwargs, &block)
else
default_store.send(name, *args, **kwargs, &block)
end
end
end
def method_missing(...)
return @instance.send(...) if @instance
log_method_missing(...)
default_store.send(...)
end
# rubocop:enable GitlabSecurity/PublicSend
def respond_to_missing?(command_name, include_private = false)
true
end
# This is needed because of Redis::Rack::Connection is requiring Redis::Store
# https://github.com/redis-store/redis-rack/blob/a833086ba494083b6a384a1a4e58b36573a9165d/lib/redis/rack/connection.rb#L15
# Done similarly in https://github.com/lsegal/yard/blob/main/lib/yard/templates/template.rb#L122
def is_a?(klass)
return true if klass == default_store.class
super(klass)
end
alias_method :kind_of?, :is_a?
def to_s
use_primary_and_secondary_stores? ? primary_store.to_s : default_store.to_s
end
def use_primary_and_secondary_stores?
feature_enabled?("use_primary_and_secondary_stores_for")
end
def use_primary_store_as_default?
feature_enabled?("use_primary_store_as_default_for")
end
def increment_pipelined_command_error_count(command_name)
@pipelined_command_error ||= Gitlab::Metrics.counter(:gitlab_redis_multi_store_pipelined_diff_error_total,
'Redis MultiStore pipelined command diff between stores')
@pipelined_command_error.increment(command: command_name, instance_name: instance_name)
end
def increment_read_fallback_count(command_name)
@read_fallback_counter ||= Gitlab::Metrics.counter(:gitlab_redis_multi_store_read_fallback_total,
'Client side Redis MultiStore reading fallback')
@read_fallback_counter.increment(command: command_name, instance_name: instance_name)
end
def increment_method_missing_count(command_name)
@method_missing_counter ||= Gitlab::Metrics.counter(:gitlab_redis_multi_store_method_missing_total,
'Client side Redis MultiStore method missing')
@method_missing_counter.increment(command: command_name, instance_name: instance_name)
end
def log_error(exception, command_name, extra = {})
Gitlab::ErrorTracking.log_exception(
exception,
command_name: command_name,
extra: extra.merge(instance_name: instance_name))
end
private
# @return [Boolean]
def feature_enabled?(prefix)
feature_table_exists? &&
Feature.enabled?("#{prefix}_#{instance_name.underscore}") &&
!same_redis_store?
end
# @return [Boolean]
def feature_table_exists?
Feature::FlipperFeature.table_exists?
rescue StandardError
false
end
def default_store
use_primary_store_as_default? ? primary_store : secondary_store
end
def log_method_missing(command_name, *_args)
return if SKIP_LOG_METHOD_MISSING_FOR_COMMANDS.include?(command_name)
log_error(MethodMissingError.new, command_name)
increment_method_missing_count(command_name)
end
# Routes a read command: when running inside a `with_instance` block,
# stay pinned to that instance; otherwise read the primary and fall
# back to the secondary.
def read_command(command_name, *args, &block)
  pinned = @instance
  return send_command(pinned, command_name, *args, &block) if pinned

  read_one_with_fallback(command_name, *args, &block)
end
# Routes a write command: when running inside a `with_instance` block,
# stay pinned to that instance; otherwise write to both stores.
def write_command(command_name, *args, **kwargs, &block)
  pinned = @instance
  return send_command(pinned, command_name, *args, **kwargs, &block) if pinned

  write_both(command_name, *args, **kwargs, &block)
end
# Reads from the primary store; on an exception or a falsy result the
# read is retried against the secondary store. Primary errors are
# logged but never propagated.
def read_one_with_fallback(command_name, *args, &block)
  value =
    begin
      send_command(primary_store, command_name, *args, &block)
    rescue StandardError => e
      log_error(e, command_name, multi_store_error_message: FAILED_TO_READ_ERROR_MESSAGE)
      nil
    end

  value || fallback_read(command_name, *args, &block)
end
# Executes the read on the secondary store. A truthy result means the
# primary missed data the secondary still has, so that event is logged
# and counted before the value is returned.
def fallback_read(command_name, *args, &block)
  result = send_command(secondary_store, command_name, *args, &block)
  return result unless result

  log_error(ReadFromPrimaryError.new, command_name)
  increment_read_fallback_count(command_name)
  result
end
# Writes to the primary first — logging, but not propagating, any
# error — then performs the same write on the secondary. The
# secondary's result is what callers receive.
def write_both(command_name, *args, **kwargs, &block)
  begin
    send_command(primary_store, command_name, *args, **kwargs, &block)
  rescue StandardError => e
    log_error(
      e, command_name,
      multi_store_error_message: FAILED_TO_WRITE_ERROR_MESSAGE
    )
  end

  # The secondary write always runs, regardless of the primary outcome.
  send_command(secondary_store, command_name, *args, **kwargs, &block)
end
# Run the entire pipeline on both stores. We assume that `&block` is
# idempotent. Primary errors are logged and swallowed; the secondary's
# result is returned. Pipelined commands return an array with all
# results — a mismatch between the two stores is logged and counted.
def pipelined_both(command_name, *args, **kwargs, &block)
  primary_result =
    begin
      send_command(primary_store, command_name, *args, **kwargs, &block)
    rescue StandardError => e
      log_error(e, command_name, multi_store_error_message: FAILED_TO_RUN_PIPELINE)
      nil
    end

  secondary_result = send_command(secondary_store, command_name, *args, **kwargs, &block)

  unless primary_result == secondary_result
    log_error(PipelinedDiffError.new, command_name)
    increment_pipelined_command_error_count(command_name)
  end

  secondary_result
end
# Memoized check for whether both stores point at the same Redis
# server, in which case dual-store behavior is pointless and disabled.
def same_redis_store?
  strong_memoize(:same_redis_store) do
    # Compare the clients' descriptions, e.g.
    # "#<Redis client v4.4.0 for redis:///path_to/redis/redis.socket/5>"
    primary_desc = primary_store.inspect
    secondary_desc = secondary_store.inspect
    primary_desc == secondary_desc
  end
end
# rubocop:disable GitlabSecurity/PublicSend
# Dispatches +command_name+ on the given Redis client. When a block is
# supplied, it is wrapped so that any commands it issues stay pinned to
# the same client that is executing the block.
def send_command(redis_instance, command_name, *args, **kwargs, &block)
  unless block_given?
    return redis_instance.send(command_name, *args, **kwargs)
  end

  # Make sure that block is wrapped and executed only on the redis
  # instance that is executing the block
  redis_instance.send(command_name, *args, **kwargs) do |*params|
    with_instance(redis_instance, *params, &block)
  end
end
# rubocop:enable GitlabSecurity/PublicSend
# Pins commands issued from the yielded block to +instance+, restoring
# the unpinned state afterwards even if the block raises.
def with_instance(instance, *params)
  @instance = instance
  begin
    yield(*params)
  ensure
    @instance = nil
  end
end
# Fails fast with ArgumentError when the constructor received a missing
# store/name or a store that is not a ::Redis client. Presence checks
# run before type checks, matching the original ordering.
def validate_stores!
  {
    'primary_store is required' => primary_store,
    'secondary_store is required' => secondary_store,
    'instance_name is required' => instance_name
  }.each do |message, value|
    raise ArgumentError, message unless value
  end

  raise ArgumentError, 'invalid primary_store' unless primary_store.is_a?(::Redis)
  raise ArgumentError, 'invalid secondary_store' unless secondary_store.is_a?(::Redis)
end
end
end
end

View File

@ -11,7 +11,11 @@ module Gitlab
def parse_job(job)
# Error information from the previous try is in the payload for
# displaying in the Sidekiq UI, but is very confusing in logs!
job = job.except('error_backtrace', 'error_class', 'error_message')
job = job.except(
'error_backtrace', 'error_class', 'error_message',
'exception.backtrace', 'exception.class', 'exception.message', 'exception.sql'
)
job['class'] = job.delete('wrapped') if job['wrapped'].present?
job['job_size_bytes'] = Sidekiq.dump_json(job['args']).bytesize

View File

@ -79,9 +79,14 @@ module Gitlab
if job_exception
payload['message'] = "#{message}: fail: #{payload['duration_s']} sec"
payload['job_status'] = 'fail'
payload['error_message'] = job_exception.message
payload['error_class'] = job_exception.class.name
add_exception_backtrace!(job_exception, payload)
Gitlab::ExceptionLogFormatter.format!(job_exception, payload)
# Deprecated fields for compatibility
# See https://gitlab.com/gitlab-org/gitlab/-/issues/364241
payload['error_class'] = payload['exception.class']
payload['error_message'] = payload['exception.message']
payload['error_backtrace'] = payload['exception.backtrace']
else
payload['message'] = "#{message}: done: #{payload['duration_s']} sec"
payload['job_status'] = 'done'
@ -98,12 +103,6 @@ module Gitlab
payload['completed_at'] = Time.now.utc.to_f
end
def add_exception_backtrace!(job_exception, payload)
return if job_exception.backtrace.blank?
payload['error_backtrace'] = Rails.backtrace_cleaner.clean(job_exception.backtrace)
end
def elapsed(t0)
t1 = get_time
{ duration: t1[:now] - t0[:now] }

View File

@ -63,7 +63,7 @@ module Gitlab
read_jid = nil
read_wal_locations = {}
Sidekiq.redis do |redis|
with_redis do |redis|
redis.multi do |multi|
multi.set(idempotency_key, jid, ex: expiry, nx: true)
read_wal_locations = check_existing_wal_locations!(multi, expiry)
@ -81,7 +81,7 @@ module Gitlab
def update_latest_wal_location!
return unless job_wal_locations.present?
Sidekiq.redis do |redis|
with_redis do |redis|
redis.multi do |multi|
job_wal_locations.each do |connection_name, location|
multi.eval(
@ -100,20 +100,19 @@ module Gitlab
strong_memoize(:latest_wal_locations) do
read_wal_locations = {}
Sidekiq.redis do |redis|
with_redis do |redis|
redis.multi do |multi|
job_wal_locations.keys.each do |connection_name|
read_wal_locations[connection_name] = multi.lindex(wal_location_key(connection_name), 0)
end
end
end
read_wal_locations.transform_values(&:value).compact
end
end
def delete!
Sidekiq.redis do |redis|
with_redis do |redis|
redis.multi do |multi|
multi.del(idempotency_key, deduplicated_flag_key)
delete_wal_locations!(multi)
@ -140,7 +139,7 @@ module Gitlab
def set_deduplicated_flag!(expiry = duplicate_key_ttl)
return unless reschedulable?
Sidekiq.redis do |redis|
with_redis do |redis|
redis.set(deduplicated_flag_key, DEDUPLICATED_FLAG_VALUE, ex: expiry, nx: true)
end
end
@ -148,7 +147,7 @@ module Gitlab
def should_reschedule?
return false unless reschedulable?
Sidekiq.redis do |redis|
with_redis do |redis|
redis.get(deduplicated_flag_key).present?
end
end
@ -272,6 +271,18 @@ module Gitlab
def reschedulable?
!scheduled? && options[:if_deduplicated] == :reschedule_once
end
# Yields a Redis connection for duplicate-job bookkeeping. When either
# migration flag is on, the dedicated DuplicateJobs multi-store is
# used; otherwise the legacy Sidekiq connection pool is kept.
def with_redis
  use_multi_store = Feature.enabled?(:use_primary_and_secondary_stores_for_duplicate_jobs) ||
    Feature.enabled?(:use_primary_store_as_default_for_duplicate_jobs)

  if use_multi_store
    # TODO: Swap for Gitlab::Redis::SharedState after store transition
    # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/923
    Gitlab::Redis::DuplicateJobs.with { |redis| yield redis }
  else
    # Keep the old behavior intact if neither feature flag is turned on
    Sidekiq.redis { |redis| yield redis }
  end
end
end
end
end

View File

@ -32681,6 +32681,9 @@ msgstr ""
msgid "Runners|Active"
msgstr ""
msgid "Runners|Add notes, like who owns the runner or what it should be used for."
msgstr ""
msgid "Runners|All"
msgstr ""
@ -32806,6 +32809,9 @@ msgstr ""
msgid "Runners|Locked to this project"
msgstr ""
msgid "Runners|Maintenance note"
msgstr ""
msgid "Runners|Maximum job timeout"
msgstr ""

View File

@ -0,0 +1,28 @@
import { createWrapper, ErrorWrapper } from '@vue/test-utils';
/**
* Find the definition (<dd>) that corresponds to this term (<dt>)
*
* Given html in the `wrapper`:
*
* <dl>
* <dt>My label</dt>
* <dd>Value</dd>
* </dl>
*
* findDd('My label', wrapper)
*
* Returns `<dd>Value</dd>`
*
* @param {object} wrapper - Parent wrapper
* @param {string} dtLabel - Label for this value
* @returns Wrapper
*/
export const findDd = (dtLabel, wrapper) => {
  const dt = wrapper.findByText(dtLabel).element;
  const dd = dt.nextElementSibling;
  // `nextElementSibling` is null when the matched node is the last
  // child; optional chaining lets that case fall through to the
  // ErrorWrapper instead of throwing a TypeError.
  if (dt.tagName === 'DT' && dd?.tagName === 'DD') {
    return createWrapper(dd, {});
  }
  return ErrorWrapper(dtLabel);
};

View File

@ -16,9 +16,9 @@ import {
MEMBER_STATE_CREATED,
MEMBER_STATE_AWAITING,
MEMBER_STATE_ACTIVE,
USER_STATE_BLOCKED_PENDING_APPROVAL,
BADGE_LABELS_AWAITING_USER_SIGNUP,
BADGE_LABELS_PENDING_OWNER_ACTION,
USER_STATE_BLOCKED,
BADGE_LABELS_AWAITING_SIGNUP,
BADGE_LABELS_PENDING,
TAB_QUERY_PARAM_VALUES,
} from '~/members/constants';
import {
@ -133,14 +133,14 @@ describe('MembersTable', () => {
describe('Invited column', () => {
describe.each`
state | userState | expectedBadgeLabel
${MEMBER_STATE_CREATED} | ${null} | ${BADGE_LABELS_AWAITING_USER_SIGNUP}
${MEMBER_STATE_CREATED} | ${USER_STATE_BLOCKED_PENDING_APPROVAL} | ${BADGE_LABELS_PENDING_OWNER_ACTION}
${MEMBER_STATE_AWAITING} | ${''} | ${BADGE_LABELS_AWAITING_USER_SIGNUP}
${MEMBER_STATE_AWAITING} | ${USER_STATE_BLOCKED_PENDING_APPROVAL} | ${BADGE_LABELS_PENDING_OWNER_ACTION}
${MEMBER_STATE_AWAITING} | ${'something_else'} | ${BADGE_LABELS_PENDING_OWNER_ACTION}
${MEMBER_STATE_ACTIVE} | ${null} | ${''}
${MEMBER_STATE_ACTIVE} | ${'something_else'} | ${''}
state | userState | expectedBadgeLabel
${MEMBER_STATE_CREATED} | ${null} | ${BADGE_LABELS_AWAITING_SIGNUP}
${MEMBER_STATE_CREATED} | ${USER_STATE_BLOCKED} | ${BADGE_LABELS_PENDING}
${MEMBER_STATE_AWAITING} | ${''} | ${BADGE_LABELS_AWAITING_SIGNUP}
${MEMBER_STATE_AWAITING} | ${USER_STATE_BLOCKED} | ${BADGE_LABELS_PENDING}
${MEMBER_STATE_AWAITING} | ${'something_else'} | ${BADGE_LABELS_PENDING}
${MEMBER_STATE_ACTIVE} | ${null} | ${''}
${MEMBER_STATE_ACTIVE} | ${'something_else'} | ${''}
`('Invited Badge', ({ state, userState, expectedBadgeLabel }) => {
it(`${
expectedBadgeLabel ? 'shows' : 'hides'

View File

@ -1,8 +1,8 @@
import { GlSprintf, GlIntersperse, GlTab } from '@gitlab/ui';
import { createWrapper, ErrorWrapper } from '@vue/test-utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import { useFakeDate } from 'helpers/fake_date';
import { findDd } from 'helpers/dl_locator_helper';
import { ACCESS_LEVEL_REF_PROTECTED, ACCESS_LEVEL_NOT_PROTECTED } from '~/runner/constants';
import RunnerDetails from '~/runner/components/runner_details.vue';
@ -24,20 +24,6 @@ describe('RunnerDetails', () => {
useFakeDate(mockNow);
/**
* Find the definition (<dd>) that corresponds to this term (<dt>)
* @param {string} dtLabel - Label for this value
* @returns Wrapper
*/
const findDd = (dtLabel) => {
const dt = wrapper.findByText(dtLabel).element;
const dd = dt.nextElementSibling;
if (dt.tagName === 'DT' && dd.tagName === 'DD') {
return createWrapper(dd, {});
}
return ErrorWrapper(dtLabel);
};
const findDetailGroups = () => wrapper.findComponent(RunnerGroups);
const findRunnersJobs = () => wrapper.findComponent(RunnersJobs);
const findJobCountBadge = () => wrapper.findByTestId('job-count-badge');
@ -108,7 +94,7 @@ describe('RunnerDetails', () => {
});
it(`displays expected value "${expectedValue}"`, () => {
expect(findDd(field).text()).toBe(expectedValue);
expect(findDd(field, wrapper).text()).toBe(expectedValue);
});
});
@ -123,7 +109,7 @@ describe('RunnerDetails', () => {
stubs,
});
expect(findDd('Tags').text().replace(/\s+/g, ' ')).toBe('tag-1 tag-2');
expect(findDd('Tags', wrapper).text().replace(/\s+/g, ' ')).toBe('tag-1 tag-2');
});
it('displays "None" when runner has no tags', () => {
@ -134,7 +120,7 @@ describe('RunnerDetails', () => {
stubs,
});
expect(findDd('Tags').text().replace(/\s+/g, ' ')).toBe('None');
expect(findDd('Tags', wrapper).text().replace(/\s+/g, ' ')).toBe('None');
});
});

View File

@ -77,6 +77,15 @@ describe('InputCopyToggleVisibility', () => {
expect(event.preventDefault).toHaveBeenCalled();
});
it('emits `copy` event when manually copied the token', () => {
expect(wrapper.emitted('copy')).toBeUndefined();
findFormInput().element.dispatchEvent(createCopyEvent());
expect(wrapper.emitted('copy')).toHaveLength(1);
expect(wrapper.emitted('copy')[0]).toEqual([]);
});
describe('visibility toggle button', () => {
it('renders a reveal button', () => {
const revealButton = findRevealButton();

View File

@ -6,8 +6,41 @@ RSpec.describe Resolvers::UserResolver do
include GraphqlHelpers
describe '#resolve' do
let_it_be(:current_user) { nil }
let_it_be(:user) { create(:user) }
shared_examples 'queries user' do
context 'authenticated access' do
let_it_be(:current_user) { create(:user) }
it 'returns the correct user' do
expect(
resolve_user(args)
).to eq(user)
end
end
context 'unauthenticated access' do
it 'forbids search' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
resolve_user(args)
end
end
context 'require_auth_for_graphql_user_resolver feature flag is disabled' do
before do
stub_feature_flags(require_auth_for_graphql_user_resolver: false)
end
it 'returns the correct user' do
expect(
resolve_user(args)
).to eq(user)
end
end
end
end
context 'when neither an ID or a username is provided' do
it 'generates an ArgumentError' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
@ -23,25 +56,21 @@ RSpec.describe Resolvers::UserResolver do
end
context 'by username' do
it 'returns the correct user' do
expect(
resolve_user(username: user.username)
).to eq(user)
include_examples "queries user" do
let(:args) { { username: user.username } }
end
end
context 'by ID' do
it 'returns the correct user' do
expect(
resolve_user(id: user.to_global_id)
).to eq(user)
include_examples "queries user" do
let(:args) { { id: user.to_global_id } }
end
end
end
private
def resolve_user(args = {})
sync(resolve(described_class, args: args))
def resolve_user(args = {}, context = { current_user: current_user })
sync(resolve(described_class, args: args, ctx: context))
end
end

View File

@ -14,14 +14,6 @@ RSpec.describe Resolvers::UsersResolver do
end
describe '#resolve' do
it 'generates an error when read_users_list is not authorized' do
expect(Ability).to receive(:allowed?).with(current_user, :read_users_list).and_return(false)
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
resolve_users
end
end
context 'when no arguments are passed' do
it 'returns all users' do
expect(resolve_users).to contain_exactly(user1, user2, current_user)
@ -79,8 +71,26 @@ RSpec.describe Resolvers::UsersResolver do
end
end
it 'allows to search by username' do
expect(resolve_users(args: { usernames: [user1.username] })).to contain_exactly(user1)
it 'prohibits search by username' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
resolve_users(args: { usernames: [user1.username] })
end
end
context 'require_auth_for_graphql_user_resolver feature flag is disabled' do
before do
stub_feature_flags(require_auth_for_graphql_user_resolver: false)
end
it 'prohibits search without usernames passed' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
resolve_users
end
end
it 'allows to search by username' do
expect(resolve_users(args: { usernames: [user1.username] })).to contain_exactly(user1)
end
end
end
end

View File

@ -42,6 +42,34 @@ RSpec.describe GitlabSchema.types['Project'] do
expect(described_class).to include_graphql_fields(*expected_fields)
end
describe 'count' do
let_it_be(:user) { create(:user) }
let(:query) do
%(
query {
projects {
count
edges {
node {
id
}
}
}
}
)
end
subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
it 'returns valid projects count' do
create(:project, namespace: user.namespace)
create(:project, namespace: user.namespace)
expect(subject.dig('data', 'projects', 'count')).to eq(2)
end
end
describe 'container_registry_enabled' do
let_it_be(:project, reload: true) { create(:project, :public) }
let_it_be(:user) { create(:user) }

View File

@ -91,8 +91,8 @@ RSpec.describe GitlabSchema.types['User'] do
context 'when requester is nil' do
let(:current_user) { nil }
it 'returns `****`' do
expect(user_name).to eq('****')
it 'returns nothing' do
expect(user_name).to be_nil
end
end
@ -134,8 +134,8 @@ RSpec.describe GitlabSchema.types['User'] do
context 'when requester is nil' do
let(:current_user) { nil }
it 'returns `****`' do
expect(user_name).to eq('****')
it 'returns nothing' do
expect(user_name).to be_nil
end
end

View File

@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Parsers::Coverage::SaxDocument do
subject(:parse_report) { Nokogiri::XML::SAX::Parser.new(described_class.new(coverage_report, project_path, paths)).parse(cobertura) }
describe '#parse!' do
let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
let(:coverage_report) { Gitlab::Ci::Reports::CoverageReport.new }
let(:project_path) { 'foo/bar' }
let(:paths) { ['app/user.rb'] }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Reports::CoverageReports do
RSpec.describe Gitlab::Ci::Reports::CoverageReport do
let(:coverage_report) { described_class.new }
it { expect(coverage_report.files).to eq({}) }

View File

@ -133,7 +133,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
end
context 'assigns the correct position' do
it 'computes de first line where the remove would appear' do
it 'computes the first line where the remove would appear' do
expect(nb_file.highlighted_diff_lines[0].old_pos).to eq(3)
expect(nb_file.highlighted_diff_lines[0].new_pos).to eq(3)
@ -142,8 +142,29 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
end
end
it 'computes de first line where the remove would appear' do
expect(nb_file.highlighted_diff_lines.map(&:text).join('')).to include('[Hidden Image Output]')
context 'has image' do
it 'replaces rich text with img to the embedded image' do
expect(nb_file.highlighted_diff_lines[58].rich_text).to include('<img')
end
it 'adds image to src' do
img = 'data:image/png;base64,some_image_here'
allow(diff).to receive(:diff).and_return("@@ -1,76 +1,74 @@\n ![](#{img})")
expect(nb_file.highlighted_diff_lines[0].rich_text).to include("<img src=\"#{img}\"")
end
end
context 'when embedded image has injected html' do
let(:commit) { project.commit("4963fefc990451a8ad34289ce266b757456fc88c") }
it 'prevents injected html to be rendered as html' do
expect(nb_file.highlighted_diff_lines[45].rich_text).not_to include('<div>Hello')
end
it 'keeps the injected html as part of the string' do
expect(nb_file.highlighted_diff_lines[45].rich_text).to end_with('/div&gt;">')
end
end
end
end

View File

@ -0,0 +1,68 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Redis::DuplicateJobs do
  # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
  # to move away from `Sidekiq.redis` for duplicate job data. Thus, we use the
  # same store configuration as the former.
  let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
  let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" }

  include_examples "redis_shared_examples"

  describe '#pool' do
    let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
    let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }

    subject { described_class.pool }

    before do
      # Clear cached raw configs so the stubbed config files below take effect.
      redis_clear_raw_config!(Gitlab::Redis::SharedState)
      redis_clear_raw_config!(Gitlab::Redis::Queues)

      # Point the two underlying stores at different fixture configs so the
      # MultiStore's primary and secondary connections are distinguishable.
      allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
      allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
    end

    after do
      redis_clear_raw_config!(Gitlab::Redis::SharedState)
      redis_clear_raw_config!(Gitlab::Redis::Queues)
    end

    around do |example|
      # Reset the memoized connection pool on both sides of each example so
      # stubbing cannot leak between examples.
      clear_pool
      example.run
    ensure
      clear_pool
    end

    it 'instantiates an instance of MultiStore' do
      subject.with do |redis_instance|
        expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)

        expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
        expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")

        expect(redis_instance.instance_name).to eq('DuplicateJobs')
      end
    end

    it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_duplicate_jobs,
      :use_primary_store_as_default_for_duplicate_jobs
  end

  describe '#raw_config_hash' do
    it 'has a legacy default URL' do
      expect(subject).to receive(:fetch_config) { false }

      expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382')
    end
  end

  describe '#store_name' do
    it 'returns the name of the SharedState store' do
      expect(described_class.store_name).to eq('SharedState')
    end
  end
end

View File

@ -0,0 +1,901 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Redis::MultiStore do
using RSpec::Parameterized::TableSyntax
let_it_be(:redis_store_class) do
Class.new(Gitlab::Redis::Wrapper) do
def config_file_name
config_file_name = "spec/fixtures/config/redis_new_format_host.yml"
Rails.root.join(config_file_name).to_s
end
def self.name
'Sessions'
end
end
end
let_it_be(:primary_db) { 1 }
let_it_be(:secondary_db) { 2 }
let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
let_it_be(:instance_name) { 'TestStore' }
let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
subject { multi_store.send(name, *args) }
before do
skip_feature_flags_yaml_validation
skip_default_enabled_yaml_check
end
after(:all) do
primary_store.flushdb
secondary_store.flushdb
end
context 'when primary_store is nil' do
let(:multi_store) { described_class.new(nil, secondary_store, instance_name)}
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/)
end
end
context 'when secondary_store is nil' do
let(:multi_store) { described_class.new(primary_store, nil, instance_name)}
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/)
end
end
context 'when instance_name is nil' do
let(:instance_name) { nil }
let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
end
end
context 'when primary_store is not a ::Redis instance' do
before do
allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
end
it 'fails with exception' do
expect { described_class.new(primary_store, secondary_store, instance_name) }
.to raise_error(ArgumentError, /invalid primary_store/)
end
end
context 'when secondary_store is not a ::Redis instance' do
before do
allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
end
it 'fails with exception' do
expect { described_class.new(primary_store, secondary_store, instance_name) }
.to raise_error(ArgumentError, /invalid secondary_store/)
end
end
context 'with READ redis commands' do
let_it_be(:key1) { "redis:{1}:key_a" }
let_it_be(:key2) { "redis:{1}:key_b" }
let_it_be(:value1) { "redis_value1"}
let_it_be(:value2) { "redis_value2"}
let_it_be(:skey) { "redis:set:key" }
let_it_be(:keys) { [key1, key2] }
let_it_be(:values) { [value1, value2] }
let_it_be(:svalues) { [value2, value1] }
where(:case_name, :name, :args, :value, :block) do
'execute :get command' | :get | ref(:key1) | ref(:value1) | nil
'execute :mget command' | :mget | ref(:keys) | ref(:values) | nil
'execute :mget with block' | :mget | ref(:keys) | ref(:values) | ->(value) { value }
'execute :smembers command' | :smembers | ref(:skey) | ref(:svalues) | nil
'execute :scard command' | :scard | ref(:skey) | 2 | nil
end
before(:all) do
primary_store.multi do |multi|
multi.set(key1, value1)
multi.set(key2, value2)
multi.sadd(skey, value1)
multi.sadd(skey, value2)
end
secondary_store.multi do |multi|
multi.set(key1, value1)
multi.set(key2, value2)
multi.sadd(skey, value1)
multi.sadd(skey, value2)
end
end
RSpec.shared_examples_for 'reads correct value' do
it 'returns the correct value' do
if value.is_a?(Array)
# :smembers does not guarantee the order it will return the values (unsorted set)
is_expected.to match_array(value)
else
is_expected.to eq(value)
end
end
end
RSpec.shared_examples_for 'fallback read from the secondary store' do
let(:counter) { Gitlab::Metrics::NullMetric.instance }
before do
allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
end
it 'fallback and execute on secondary instance' do
expect(secondary_store).to receive(name).with(*args).and_call_original
subject
end
it 'logs the ReadFromPrimaryError' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
an_instance_of(Gitlab::Redis::MultiStore::ReadFromPrimaryError),
hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
)
subject
end
it 'increment read fallback count metrics' do
expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
subject
end
include_examples 'reads correct value'
context 'when fallback read from the secondary instance raises an exception' do
before do
allow(secondary_store).to receive(name).with(*args).and_raise(StandardError)
end
it 'fails with exception' do
expect { subject }.to raise_error(StandardError)
end
end
end
RSpec.shared_examples_for 'secondary store' do
it 'execute on the secondary instance' do
expect(secondary_store).to receive(name).with(*args).and_call_original
subject
end
include_examples 'reads correct value'
it 'does not execute on the primary store' do
expect(primary_store).not_to receive(name)
subject
end
end
with_them do
describe "#{name}" do
before do
allow(primary_store).to receive(name).and_call_original
allow(secondary_store).to receive(name).and_call_original
end
context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
end
context 'when reading from the primary is successful' do
it 'returns the correct value' do
expect(primary_store).to receive(name).with(*args).and_call_original
subject
end
it 'does not execute on the secondary store' do
expect(secondary_store).not_to receive(name)
subject
end
include_examples 'reads correct value'
end
context 'when reading from primary instance is raising an exception' do
before do
allow(primary_store).to receive(name).with(*args).and_raise(StandardError)
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
it 'logs the exception' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(extra: hash_including(:multi_store_error_message, instance_name: instance_name),
command_name: name))
subject
end
include_examples 'fallback read from the secondary store'
end
context 'when reading from primary instance return no value' do
before do
allow(primary_store).to receive(name).and_return(nil)
end
include_examples 'fallback read from the secondary store'
end
context 'when the command is executed within pipelined block' do
subject do
multi_store.pipelined do
multi_store.send(name, *args)
end
end
it 'is executed only 1 time on primary instance' do
expect(primary_store).to receive(name).with(*args).once
subject
end
end
if params[:block]
subject do
multi_store.send(name, *args, &block)
end
context 'when block is provided' do
it 'yields to the block' do
expect(primary_store).to receive(name).and_yield(value)
subject
end
include_examples 'reads correct value'
end
end
end
context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
end
context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
before do
stub_feature_flags(use_primary_store_as_default_for_test_store: false)
end
it_behaves_like 'secondary store'
end
context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
before do
stub_feature_flags(use_primary_store_as_default_for_test_store: true)
end
it 'execute on the primary instance' do
expect(primary_store).to receive(name).with(*args).and_call_original
subject
end
include_examples 'reads correct value'
it 'does not execute on the secondary store' do
expect(secondary_store).not_to receive(name)
subject
end
end
end
context 'with both primary and secondary store using same redis instance' do
let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
it_behaves_like 'secondary store'
end
end
end
end
RSpec.shared_examples_for 'verify that store contains values' do |store|
it "#{store} redis store contains correct values", :aggregate_errors do
subject
redis_store = multi_store.send(store)
if expected_value.is_a?(Array)
# :smembers does not guarantee the order it will return the values
expect(redis_store.send(verification_name, *verification_args)).to match_array(expected_value)
else
expect(redis_store.send(verification_name, *verification_args)).to eq(expected_value)
end
end
end
context 'with WRITE redis commands' do
let_it_be(:key1) { "redis:{1}:key_a" }
let_it_be(:key2) { "redis:{1}:key_b" }
let_it_be(:value1) { "redis_value1"}
let_it_be(:value2) { "redis_value2"}
let_it_be(:key1_value1) { [key1, value1] }
let_it_be(:key1_value2) { [key1, value2] }
let_it_be(:ttl) { 10 }
let_it_be(:key1_ttl_value1) { [key1, ttl, value1] }
let_it_be(:skey) { "redis:set:key" }
let_it_be(:svalues1) { [value2, value1] }
let_it_be(:svalues2) { [value1] }
let_it_be(:skey_value1) { [skey, value1] }
let_it_be(:skey_value2) { [skey, value2] }
where(:case_name, :name, :args, :expected_value, :verification_name, :verification_args) do
'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1)
'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil
end
before do
primary_store.flushdb
secondary_store.flushdb
primary_store.multi do |multi|
multi.set(key2, value1)
multi.sadd(skey, value1)
end
secondary_store.multi do |multi|
multi.set(key2, value1)
multi.sadd(skey, value1)
end
end
# For each table row (command name + args), verify how MultiStore routes the
# command depending on the two feature flags:
#   - dual-write flag on  -> command runs on both stores (primary first)
#   - dual-write flag off -> command runs on the single default store only
with_them do
describe "#{name}" do
# `no_args` stands in for commands (e.g. :flushdb) that take no arguments.
let(:expected_args) {args || no_args }
before do
allow(primary_store).to receive(name).and_call_original
allow(secondary_store).to receive(name).and_call_original
end
context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
end
context 'when executing on primary instance is successful' do
it 'executes on both primary and secondary redis store', :aggregate_errors do
expect(primary_store).to receive(name).with(*expected_args).and_call_original
expect(secondary_store).to receive(name).with(*expected_args).and_call_original
subject
end
include_examples 'verify that store contains values', :primary_store
include_examples 'verify that store contains values', :secondary_store
end
context 'when executing on the primary instance is raising an exception' do
before do
allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
# A primary failure must not break callers: it is logged and the command
# still runs on the secondary store.
it 'logs the exception and execute on secondary instance', :aggregate_errors do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(extra: hash_including(:multi_store_error_message), command_name: name))
expect(secondary_store).to receive(name).with(*expected_args).and_call_original
subject
end
include_examples 'verify that store contains values', :secondary_store
end
context 'when the command is executed within pipelined block' do
subject do
multi_store.pipelined do
multi_store.send(name, *args)
end
end
# Guards against double-dispatch: pipelining must not re-route the inner
# command through MultiStore a second time per instance.
it 'is executed only 1 time on each instance', :aggregate_errors do
expect(primary_store).to receive(name).with(*expected_args).once
expect(secondary_store).to receive(name).with(*expected_args).once
subject
end
include_examples 'verify that store contains values', :primary_store
include_examples 'verify that store contains values', :secondary_store
end
end
context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
end
context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
before do
stub_feature_flags(use_primary_store_as_default_for_test_store: false)
end
it 'executes only on the secondary redis store', :aggregate_errors do
expect(secondary_store).to receive(name).with(*expected_args)
expect(primary_store).not_to receive(name).with(*expected_args)
subject
end
include_examples 'verify that store contains values', :secondary_store
end
context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
before do
stub_feature_flags(use_primary_store_as_default_for_test_store: true)
end
it 'executes only on the primary_redis redis store', :aggregate_errors do
expect(primary_store).to receive(name).with(*expected_args)
expect(secondary_store).not_to receive(name).with(*expected_args)
subject
end
include_examples 'verify that store contains values', :primary_store
end
end
end
end
end
# Shared behaviour for the two transactional entry points (#multi and
# #pipelined). `name` is the method under test. Covers flag-based routing,
# primary-failure fallback, and cross-store result diffing (values read back
# from both stores are compared; mismatches are logged and counted, and the
# secondary store's value wins — it is the current source of truth).
RSpec.shared_examples_for 'pipelined command' do |name|
let_it_be(:key1) { "redis:{1}:key_a" }
let_it_be(:value1) { "redis_value1"}
let_it_be(:value2) { "redis_value2"}
let_it_be(:expected_value) { value1 }
# Used by the 'verify that store contains values' shared examples.
let_it_be(:verification_name) { :get }
let_it_be(:verification_args) { key1 }
before do
primary_store.flushdb
secondary_store.flushdb
end
describe "command execution in a transaction" do
# NullMetric lets us set message expectations without a real Prometheus client.
let(:counter) { Gitlab::Metrics::NullMetric.instance }
before do
allow(Gitlab::Metrics).to receive(:counter).with(
:gitlab_redis_multi_store_pipelined_diff_error_total,
'Redis MultiStore pipelined command diff between stores'
).and_return(counter)
end
subject do
multi_store.send(name) do |redis|
redis.set(key1, value1)
end
end
context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
end
context 'when executing on primary instance is successful' do
it 'executes on both primary and secondary redis store', :aggregate_errors do
expect(primary_store).to receive(name).and_call_original
expect(secondary_store).to receive(name).and_call_original
subject
end
include_examples 'verify that store contains values', :primary_store
include_examples 'verify that store contains values', :secondary_store
end
context 'when executing on the primary instance is raising an exception' do
before do
allow(primary_store).to receive(name).and_raise(StandardError)
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
it 'logs the exception and execute on secondary instance', :aggregate_errors do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(extra: hash_including(:multi_store_error_message), command_name: name))
expect(secondary_store).to receive(name).and_call_original
subject
end
include_examples 'verify that store contains values', :secondary_store
end
describe 'return values from a transaction' do
subject do
multi_store.send(name) do |redis|
redis.get(key1)
end
end
context 'when the value exists on both and are equal' do
before do
primary_store.set(key1, value1)
secondary_store.set(key1, value1)
end
it 'returns the value' do
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
expect(subject).to eq([value1])
end
end
context 'when the value exists on both but differ' do
before do
primary_store.set(key1, value1)
secondary_store.set(key1, value2)
end
# Divergence between stores is a data-consistency error: log + count it,
# but still return the secondary store's value to the caller.
it 'returns the value from the secondary store, logging an error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
).and_call_original
expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
expect(subject).to eq([value2])
end
end
context 'when the value does not exist on the primary but it does on the secondary' do
before do
secondary_store.set(key1, value2)
end
it 'returns the value from the secondary store, logging an error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
)
expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
expect(subject).to eq([value2])
end
end
context 'when the value does not exist in either' do
# Matching nils on both sides is agreement, not divergence.
it 'returns nil without logging an error' do
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
expect(counter).not_to receive(:increment)
expect(subject).to eq([nil])
end
end
end
end
context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
before do
stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
end
context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
before do
stub_feature_flags(use_primary_store_as_default_for_test_store: false)
end
it 'executes only on the secondary redis store', :aggregate_errors do
expect(secondary_store).to receive(name)
expect(primary_store).not_to receive(name)
subject
end
include_examples 'verify that store contains values', :secondary_store
end
context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
before do
stub_feature_flags(use_primary_store_as_default_for_test_store: true)
end
it 'executes only on the primary_redis redis store', :aggregate_errors do
expect(primary_store).to receive(name)
expect(secondary_store).not_to receive(name)
subject
end
include_examples 'verify that store contains values', :primary_store
end
end
end
end
# #multi (MULTI/EXEC transaction) shares the pipelined routing behaviour.
describe '#multi' do
include_examples 'pipelined command', :multi
end
# #pipelined shares the same dual-store routing behaviour as #multi.
describe '#pipelined' do
include_examples 'pipelined command', :pipelined
end
# Commands without a dedicated MultiStore wrapper (e.g. :incr) fall through to
# method_missing, which executes on the *default* store only and — unless the
# command is allow-listed — logs a MethodMissingError and bumps a counter so
# missing wrappers get noticed.
context 'with unsupported command' do
  let(:counter) { Gitlab::Metrics::NullMetric.instance }

  before do
    primary_store.flushdb
    secondary_store.flushdb

    allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
  end

  let_it_be(:key) { "redis:counter" }

  subject { multi_store.incr(key) }

  it 'responds to missing method' do
    expect(multi_store).to receive(:respond_to_missing?).and_call_original

    expect(multi_store.respond_to?(:incr)).to be(true)
  end

  it 'executes method missing' do
    expect(multi_store).to receive(:method_missing)

    subject
  end

  context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
    it 'logs MethodMissingError' do
      expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
        an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
        hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name))
      )

      subject
    end

    it 'increments method missing counter' do
      expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name)

      subject
    end
  end

  context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
    subject { multi_store.info }

    it 'does not log MethodMissingError' do
      expect(Gitlab::ErrorTracking).not_to receive(:log_exception)

      subject
    end

    it 'does not increment method missing counter' do
      expect(counter).not_to receive(:increment)

      subject
    end
  end

  context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
    before do
      stub_feature_flags(use_primary_store_as_default_for_test_store: true)
    end

    # With the flag enabled the default store is the *primary* one, which is
    # what the expectations assert; the descriptions previously claimed
    # "secondary store", contradicting the assertions.
    it 'falls back and executes only on the primary store', :aggregate_errors do
      expect(primary_store).to receive(:incr).with(key).and_call_original
      expect(secondary_store).not_to receive(:incr)

      subject
    end

    it 'correct value is stored on the primary store', :aggregate_errors do
      subject

      expect(secondary_store.get(key)).to be_nil
      expect(primary_store.get(key)).to eq('1')
    end
  end

  context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
    before do
      stub_feature_flags(use_primary_store_as_default_for_test_store: false)
    end

    it 'falls back and executes only on the secondary store', :aggregate_errors do
      expect(secondary_store).to receive(:incr).with(key).and_call_original
      expect(primary_store).not_to receive(:incr)

      subject
    end

    it 'correct value is stored on the secondary store', :aggregate_errors do
      subject

      expect(primary_store.get(key)).to be_nil
      expect(secondary_store.get(key)).to eq('1')
    end
  end

  context 'when the command is executed within pipelined block' do
    subject do
      multi_store.pipelined do
        multi_store.incr(key)
      end
    end

    # Inside a pipeline the unsupported command is dual-written once per store,
    # not re-routed through method_missing a second time.
    it 'is executed only 1 time on each instance', :aggregate_errors do
      expect(primary_store).to receive(:incr).with(key).once
      expect(secondary_store).to receive(:incr).with(key).once

      subject
    end

    it "both redis stores are containing correct values", :aggregate_errors do
      subject

      expect(primary_store.get(key)).to eq('1')
      expect(secondary_store.get(key)).to eq('1')
    end
  end
end
# #to_s must mirror whichever store is the current default, since callers may
# use it for logging/identification of the underlying connection.
describe '#to_s' do
  subject { multi_store.to_s }

  context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
    before do
      stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
    end

    it 'returns same value as primary_store' do
      is_expected.to eq(primary_store.to_s)
    end
  end

  context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
    before do
      stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
    end

    context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
      before do
        stub_feature_flags(use_primary_store_as_default_for_test_store: true)
      end

      it 'returns same value as primary_store' do
        is_expected.to eq(primary_store.to_s)
      end
    end

    context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
      before do
        stub_feature_flags(use_primary_store_as_default_for_test_store: false)
      end

      # Description fixed: the assertion compares against the secondary store,
      # but the example previously claimed "primary_store".
      it 'returns same value as secondary_store' do
        is_expected.to eq(secondary_store.to_s)
      end
    end
  end
end
# MultiStore masquerades as a ::Redis::Store so type checks in callers and in
# redis-store internals keep passing.
describe '#is_a?' do
  it 'returns true for ::Redis::Store' do
    expect(multi_store).to be_a(::Redis::Store)
  end
end
# Dual-write mode is governed by the :use_primary_and_secondary_stores_* flag,
# and must fail safe (disabled) when the feature-flag table is unavailable,
# e.g. during `rake gitlab:setup` or early migrations.
describe '#use_primary_and_secondary_stores?' do
  context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
    before do
      stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
    end

    # Description fixed: this example asserts the enabled state but previously
    # read "multi store is disabled".
    it 'multi store is enabled' do
      expect(multi_store.use_primary_and_secondary_stores?).to be true
    end
  end

  context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
    before do
      stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
    end

    it 'multi store is disabled' do
      expect(multi_store.use_primary_and_secondary_stores?).to be false
    end
  end

  context 'with empty DB' do
    before do
      allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
    end

    it 'multi store is disabled' do
      expect(multi_store.use_primary_and_secondary_stores?).to be false
    end
  end

  context 'when FF table guard raises' do
    before do
      allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise
    end

    it 'multi store is disabled' do
      expect(multi_store.use_primary_and_secondary_stores?).to be false
    end
  end
end
# Which store is the default (when not dual-writing) is governed by the
# :use_primary_store_as_default_* flag, and must also fail safe when the
# feature-flag table is unavailable.
describe '#use_primary_store_as_default?' do
  context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
    before do
      stub_feature_flags(use_primary_store_as_default_for_test_store: true)
    end

    it 'primary store is default' do
      expect(multi_store.use_primary_store_as_default?).to be true
    end
  end

  context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
    before do
      stub_feature_flags(use_primary_store_as_default_for_test_store: false)
    end

    it 'primary store is not default' do
      expect(multi_store.use_primary_store_as_default?).to be false
    end
  end

  # NOTE(review): the two guard contexts below previously asserted
  # `use_primary_and_secondary_stores?` — a copy-paste from the preceding
  # describe — leaving the method under test unexercised for these paths.
  context 'with empty DB' do
    before do
      allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
    end

    it 'primary store is not default' do
      expect(multi_store.use_primary_store_as_default?).to be false
    end
  end

  context 'when FF table guard raises' do
    before do
      allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise
    end

    it 'primary store is not default' do
      expect(multi_store.use_primary_store_as_default?).to be false
    end
  end
end
# Builds a ::Redis::Store from the base connection options merged with any
# per-store overrides (e.g. a distinct db number or serializer).
def create_redis_store(options, extras = {})
  merged_options = options.merge(extras)
  ::Redis::Store.new(merged_options)
end
end

View File

@ -59,6 +59,21 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
it 'logs the normalized SQL query for statement timeouts' do
travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
expect(logger).to receive(:warn).with(
include('exception.sql' => 'SELECT "users".* FROM "users" WHERE "users"."id" = $1 AND "users"."foo" = $2')
)
expect do
call_subject(job, 'test_queue') do
raise ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = 2')
end
end.to raise_error(ActiveRecord::StatementInvalid)
end
end
it 'logs the root cause of an Sidekiq::JobRetry::Skip exception in the job' do
travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
@ -100,8 +115,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
include(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
'job_status' => 'fail',
'error_class' => 'Sidekiq::JobRetry::Skip',
'error_message' => 'Sidekiq::JobRetry::Skip'
'exception.class' => 'Sidekiq::JobRetry::Skip',
'exception.message' => 'Sidekiq::JobRetry::Skip'
)
)
expect(subject).to receive(:log_job_start).and_call_original

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues do
RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
using RSpec::Parameterized::TableSyntax
subject(:duplicate_job) do
@ -81,135 +81,99 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
describe '#check!' do
context 'when there was no job in the queue yet' do
it { expect(duplicate_job.check!).to eq('123') }
shared_examples 'tracking duplicates in redis' do
describe '#check!' do
context 'when there was no job in the queue yet' do
it { expect(duplicate_job.check!).to eq('123') }
shared_examples 'sets Redis keys with correct TTL' do
it "adds an idempotency key with correct ttl" do
expect { duplicate_job.check! }
.to change { read_idempotency_key_with_ttl(idempotency_key) }
.from([nil, -2])
.to(['123', be_within(1).of(expected_ttl)])
end
context 'when wal locations is not empty' do
it "adds an existing wal locations key with correct ttl" do
shared_examples 'sets Redis keys with correct TTL' do
it "adds an idempotency key with correct ttl" do
expect { duplicate_job.check! }
.to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
.to change { read_idempotency_key_with_ttl(idempotency_key) }
.from([nil, -2])
.to([wal_locations[:main], be_within(1).of(expected_ttl)])
.and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
.from([nil, -2])
.to([wal_locations[:ci], be_within(1).of(expected_ttl)])
.to(['123', be_within(1).of(expected_ttl)])
end
context 'when wal locations is not empty' do
it "adds an existing wal locations key with correct ttl" do
expect { duplicate_job.check! }
.to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
.from([nil, -2])
.to([wal_locations[:main], be_within(1).of(expected_ttl)])
.and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
.from([nil, -2])
.to([wal_locations[:ci], be_within(1).of(expected_ttl)])
end
end
end
context 'when TTL option is not set' do
let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
it_behaves_like 'sets Redis keys with correct TTL'
end
context 'when TTL option is set' do
let(:expected_ttl) { 5.minutes }
before do
allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl })
end
it_behaves_like 'sets Redis keys with correct TTL'
end
it "adds the idempotency key to the jobs payload" do
expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
end
end
context 'with TTL option is not set' do
let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
it_behaves_like 'sets Redis keys with correct TTL'
end
context 'when TTL option is set' do
let(:expected_ttl) { 5.minutes }
context 'when there was already a job with same arguments in the same queue' do
before do
allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl })
set_idempotency_key(idempotency_key, 'existing-key')
wal_locations.each do |config_name, location|
set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
end
end
it_behaves_like 'sets Redis keys with correct TTL'
end
it { expect(duplicate_job.check!).to eq('existing-key') }
it "adds the idempotency key to the jobs payload" do
expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
it "does not change the existing key's TTL" do
expect { duplicate_job.check! }
.not_to change { read_idempotency_key_with_ttl(idempotency_key) }
.from(['existing-key', -1])
end
it "does not change the existing wal locations key's TTL" do
expect { duplicate_job.check! }
.to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
.from([wal_locations[:main], -1])
.and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
.from([wal_locations[:ci], -1])
end
it 'sets the existing jid' do
duplicate_job.check!
expect(duplicate_job.existing_jid).to eq('existing-key')
end
end
end
context 'when there was already a job with same arguments in the same queue' do
describe '#update_latest_wal_location!' do
before do
set_idempotency_key(idempotency_key, 'existing-key')
wal_locations.each do |config_name, location|
set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
end
end
allow(Gitlab::Database).to receive(:database_base_models).and_return(
{ main: ::ActiveRecord::Base,
ci: ::ActiveRecord::Base })
it { expect(duplicate_job.check!).to eq('existing-key') }
set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
it "does not change the existing key's TTL" do
expect { duplicate_job.check! }
.not_to change { read_idempotency_key_with_ttl(idempotency_key) }
.from(['existing-key', -1])
end
it "does not change the existing wal locations key's TTL" do
expect { duplicate_job.check! }
.to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
.from([wal_locations[:main], -1])
.and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
.from([wal_locations[:ci], -1])
end
it 'sets the existing jid' do
# read existing_wal_locations
duplicate_job.check!
expect(duplicate_job.existing_jid).to eq('existing-key')
end
end
end
describe '#update_latest_wal_location!' do
before do
allow(Gitlab::Database).to receive(:database_base_models).and_return(
{ main: ::ActiveRecord::Base,
ci: ::ActiveRecord::Base })
set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
# read existing_wal_locations
duplicate_job.check!
end
context "when the key doesn't exists in redis" do
let(:existing_wal) do
{
main: '0/D525E3A0',
ci: 'AB/12340'
}
end
let(:new_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
main: ['0/D525E3A8', '8'],
ci: ['AB/12345', '5']
}
end
let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
it 'stores a wal location to redis with an offset relative to existing wal location' do
expect { duplicate_job.update_latest_wal_location! }
.to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
.from([])
.to(new_wal_location_with_offset[:main])
.and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
.from([])
.to(new_wal_location_with_offset[:ci])
end
end
context "when the key exists in redis" do
before do
rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
end
let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
context "when the new offset is bigger then the existing one" do
context "when the key doesn't exists in redis" do
let(:existing_wal) do
{
main: '0/D525E3A0',
@ -217,14 +181,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
}
end
let(:stored_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
main: ['0/D525E3A3', '3'],
ci: ['AB/12342', '2']
}
end
let(:new_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
@ -233,154 +189,335 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
}
end
it 'updates a wal location to redis with an offset' do
let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
it 'stores a wal location to redis with an offset relative to existing wal location' do
expect { duplicate_job.update_latest_wal_location! }
.to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
.from(stored_wal_location_with_offset[:main])
.from([])
.to(new_wal_location_with_offset[:main])
.and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
.from(stored_wal_location_with_offset[:ci])
.from([])
.to(new_wal_location_with_offset[:ci])
end
end
context "when the old offset is not bigger then the existing one" do
let(:existing_wal) do
{
main: '0/D525E3A0',
ci: 'AB/12340'
}
context "when the key exists in redis" do
before do
rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
end
let(:stored_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
main: ['0/D525E3A8', '8'],
ci: ['AB/12345', '5']
}
let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
context "when the new offset is bigger then the existing one" do
let(:existing_wal) do
{
main: '0/D525E3A0',
ci: 'AB/12340'
}
end
let(:stored_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
main: ['0/D525E3A3', '3'],
ci: ['AB/12342', '2']
}
end
let(:new_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
main: ['0/D525E3A8', '8'],
ci: ['AB/12345', '5']
}
end
it 'updates a wal location to redis with an offset' do
expect { duplicate_job.update_latest_wal_location! }
.to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
.from(stored_wal_location_with_offset[:main])
.to(new_wal_location_with_offset[:main])
.and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
.from(stored_wal_location_with_offset[:ci])
.to(new_wal_location_with_offset[:ci])
end
end
let(:new_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
main: ['0/D525E3A2', '2'],
ci: ['AB/12342', '2']
}
context "when the old offset is not bigger then the existing one" do
let(:existing_wal) do
{
main: '0/D525E3A0',
ci: 'AB/12340'
}
end
let(:stored_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
main: ['0/D525E3A8', '8'],
ci: ['AB/12345', '5']
}
end
let(:new_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
main: ['0/D525E3A2', '2'],
ci: ['AB/12342', '2']
}
end
it "does not update a wal location to redis with an offset" do
expect { duplicate_job.update_latest_wal_location! }
.to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
.from(stored_wal_location_with_offset[:main])
.and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
.from(stored_wal_location_with_offset[:ci])
end
end
end
end
describe '#latest_wal_locations' do
context 'when job was deduplicated and wal locations were already persisted' do
before do
rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
end
it "does not update a wal location to redis with an offset" do
expect { duplicate_job.update_latest_wal_location! }
.to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
.from(stored_wal_location_with_offset[:main])
.and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
.from(stored_wal_location_with_offset[:ci])
it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
end
context 'when job is not deduplication and wal locations were not persisted' do
it { expect(duplicate_job.latest_wal_locations).to be_empty }
end
end
describe '#delete!' do
context "when we didn't track the definition" do
it { expect { duplicate_job.delete! }.not_to raise_error }
end
context 'when the key exists in redis' do
before do
set_idempotency_key(idempotency_key, 'existing-jid')
set_idempotency_key(deduplicated_flag_key, 1)
wal_locations.each do |config_name, location|
set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
end
end
shared_examples 'deleting the duplicate job' do
shared_examples 'deleting keys from redis' do |key_name|
it "removes the #{key_name} from redis" do
expect { duplicate_job.delete! }
.to change { read_idempotency_key_with_ttl(key) }
.from([from_value, -1])
.to([nil, -2])
end
end
shared_examples 'does not delete key from redis' do |key_name|
it "does not remove the #{key_name} from redis" do
expect { duplicate_job.delete! }
.to not_change { read_idempotency_key_with_ttl(key) }
.from([from_value, -1])
end
end
it_behaves_like 'deleting keys from redis', 'idempotent key' do
let(:key) { idempotency_key }
let(:from_value) { 'existing-jid' }
end
it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
let(:key) { deduplicated_flag_key }
let(:from_value) { '1' }
end
it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
let(:key) { existing_wal_location_key(idempotency_key, :main) }
let(:from_value) { wal_locations[:main] }
end
it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
let(:key) { existing_wal_location_key(idempotency_key, :ci) }
let(:from_value) { wal_locations[:ci] }
end
it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
let(:key) { wal_location_key(idempotency_key, :main) }
let(:from_value) { wal_locations[:main] }
end
it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
let(:key) { wal_location_key(idempotency_key, :ci) }
let(:from_value) { wal_locations[:ci] }
end
end
context 'when the idempotency key is not part of the job' do
it_behaves_like 'deleting the duplicate job'
it 'recalculates the idempotency hash' do
expect(duplicate_job).to receive(:idempotency_hash).and_call_original
duplicate_job.delete!
end
end
context 'when the idempotency key is part of the job' do
let(:idempotency_key) { 'not the same as what we calculate' }
let(:job) { super().merge('idempotency_key' => idempotency_key) }
it_behaves_like 'deleting the duplicate job'
it 'does not recalculate the idempotency hash' do
expect(duplicate_job).not_to receive(:idempotency_hash)
duplicate_job.delete!
end
end
end
end
describe '#set_deduplicated_flag!' do
context 'when the job is reschedulable' do
before do
allow(duplicate_job).to receive(:reschedulable?) { true }
end
it 'sets the key in Redis' do
duplicate_job.set_deduplicated_flag!
flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
end
it 'sets, gets and cleans up the deduplicated flag' do
expect(duplicate_job.should_reschedule?).to eq(false)
duplicate_job.set_deduplicated_flag!
expect(duplicate_job.should_reschedule?).to eq(true)
duplicate_job.delete!
expect(duplicate_job.should_reschedule?).to eq(false)
end
end
context 'when the job is not reschedulable' do
before do
allow(duplicate_job).to receive(:reschedulable?) { false }
end
it 'does not set the key in Redis' do
duplicate_job.set_deduplicated_flag!
flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
expect(flag).to be_nil
end
it 'does not set the deduplicated flag' do
expect(duplicate_job.should_reschedule?).to eq(false)
duplicate_job.set_deduplicated_flag!
expect(duplicate_job.should_reschedule?).to eq(false)
duplicate_job.delete!
expect(duplicate_job.should_reschedule?).to eq(false)
end
end
end
describe '#duplicate?' do
it "raises an error if the check wasn't performed" do
expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
end
it 'returns false if the existing jid equals the job jid' do
duplicate_job.check!
expect(duplicate_job.duplicate?).to be(false)
end
it 'returns false if the existing jid is different from the job jid' do
set_idempotency_key(idempotency_key, 'a different jid')
duplicate_job.check!
expect(duplicate_job.duplicate?).to be(true)
end
end
def existing_wal_location_key(idempotency_key, connection_name)
"#{idempotency_key}:#{connection_name}:existing_wal_location"
end
def wal_location_key(idempotency_key, connection_name)
"#{idempotency_key}:#{connection_name}:wal_location"
end
def set_idempotency_key(key, value = '1')
with_redis { |r| r.set(key, value) }
end
def rpush_to_redis_key(key, wal, offset)
with_redis { |r| r.rpush(key, [wal, offset]) }
end
def read_idempotency_key_with_ttl(key)
with_redis do |redis|
redis.pipelined do |p|
p.get(key)
p.ttl(key)
end
end
end
def read_range_from_redis(key)
with_redis do |redis|
redis.lrange(key, 0, -1)
end
end
end
describe '#latest_wal_locations' do
context 'when job was deduplicated and wal locations were already persisted' do
before do
rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
end
it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
context 'with multi-store feature flags turned on' do
def with_redis(&block)
Gitlab::Redis::DuplicateJobs.with(&block)
end
context 'when job is not deduplication and wal locations were not persisted' do
it { expect(duplicate_job.latest_wal_locations).to be_empty }
it 'use Gitlab::Redis::DuplicateJobs.with' do
expect(Gitlab::Redis::DuplicateJobs).to receive(:with).and_call_original
expect(Sidekiq).not_to receive(:redis)
duplicate_job.check!
end
it_behaves_like 'tracking duplicates in redis'
end
describe '#delete!' do
context "when we didn't track the definition" do
it { expect { duplicate_job.delete! }.not_to raise_error }
context 'when both multi-store feature flags are off' do
# Routes the helper methods' Redis access through Sidekiq's default
# connection pool, exercising the legacy (non multi-store) path.
def with_redis(&block)
  Sidekiq.redis(&block)
end
context 'when the key exists in redis' do
before do
set_idempotency_key(idempotency_key, 'existing-jid')
set_idempotency_key(deduplicated_flag_key, 1)
wal_locations.each do |config_name, location|
set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
end
end
# Asserts that delete! removes every Redis key the duplicate-job tracking
# writes: the idempotency key, the deduplicated flag, and both the
# existing and latest WAL location keys for each database connection.
shared_examples 'deleting the duplicate job' do
  # Expects `key` and `from_value` lets from the including block.
  # [value, ttl] goes from [from_value, -1] (present, no expiry) to
  # [nil, -2] (Redis convention for a missing key).
  shared_examples 'deleting keys from redis' do |key_name|
    it "removes the #{key_name} from redis" do
      expect { duplicate_job.delete! }
        .to change { read_idempotency_key_with_ttl(key) }
        .from([from_value, -1])
        .to([nil, -2])
    end
  end

  # Counterpart: the key/value pair must survive delete! untouched.
  shared_examples 'does not delete key from redis' do |key_name|
    it "does not remove the #{key_name} from redis" do
      expect { duplicate_job.delete! }
        .to not_change { read_idempotency_key_with_ttl(key) }
        .from([from_value, -1])
    end
  end

  it_behaves_like 'deleting keys from redis', 'idempotent key' do
    let(:key) { idempotency_key }
    let(:from_value) { 'existing-jid' }
  end

  it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
    let(:key) { deduplicated_flag_key }
    let(:from_value) { '1' }
  end

  it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
    let(:key) { existing_wal_location_key(idempotency_key, :main) }
    let(:from_value) { wal_locations[:main] }
  end

  it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
    let(:key) { existing_wal_location_key(idempotency_key, :ci) }
    let(:from_value) { wal_locations[:ci] }
  end

  it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
    let(:key) { wal_location_key(idempotency_key, :main) }
    let(:from_value) { wal_locations[:main] }
  end

  it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
    let(:key) { wal_location_key(idempotency_key, :ci) }
    let(:from_value) { wal_locations[:ci] }
  end
end
# Without an explicit 'idempotency_key' in the job payload, delete! must
# derive the key, so idempotency_hash is expected to be computed.
context 'when the idempotency key is not part of the job' do
  it_behaves_like 'deleting the duplicate job'

  it 'recalculates the idempotency hash' do
    expect(duplicate_job).to receive(:idempotency_hash).and_call_original

    duplicate_job.delete!
  end
end
# When the payload carries its own 'idempotency_key', that value is used
# verbatim and no hash computation should happen.
context 'when the idempotency key is part of the job' do
  let(:idempotency_key) { 'not the same as what we calculate' }
  let(:job) { super().merge('idempotency_key' => idempotency_key) }

  it_behaves_like 'deleting the duplicate job'

  it 'does not recalculate the idempotency hash' do
    expect(duplicate_job).not_to receive(:idempotency_hash)

    duplicate_job.delete!
  end
end
before do
stub_feature_flags(use_primary_and_secondary_stores_for_duplicate_jobs: false)
stub_feature_flags(use_primary_store_as_default_for_duplicate_jobs: false)
end
# With both multi-store flags off, check! must fall back to Sidekiq's
# default Redis connection and never touch the dedicated store.
# (Description grammar fixed: 'use' -> 'uses'.)
it 'uses Sidekiq.redis' do
  expect(Sidekiq).to receive(:redis).and_call_original
  expect(Gitlab::Redis::DuplicateJobs).not_to receive(:with)

  duplicate_job.check!
end
it_behaves_like 'tracking duplicates in redis'
end
describe '#scheduled?' do
@ -449,75 +586,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
# The deduplicated flag marks a job that was dropped as a duplicate so it
# can be rescheduled later; it is only written for reschedulable jobs.
describe '#set_deduplicated_flag!' do
  context 'when the job is reschedulable' do
    before do
      allow(duplicate_job).to receive(:reschedulable?) { true }
    end

    it 'sets the key in Redis' do
      duplicate_job.set_deduplicated_flag!

      flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) }

      expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
    end

    # Full lifecycle: flag absent -> set -> cleaned up by delete!.
    it 'sets, gets and cleans up the deduplicated flag' do
      expect(duplicate_job.should_reschedule?).to eq(false)

      duplicate_job.set_deduplicated_flag!
      expect(duplicate_job.should_reschedule?).to eq(true)

      duplicate_job.delete!
      expect(duplicate_job.should_reschedule?).to eq(false)
    end
  end

  context 'when the job is not reschedulable' do
    before do
      allow(duplicate_job).to receive(:reschedulable?) { false }
    end

    it 'does not set the key in Redis' do
      duplicate_job.set_deduplicated_flag!

      flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) }

      expect(flag).to be_nil
    end

    # should_reschedule? must stay false through the whole lifecycle.
    it 'does not set the deduplicated flag' do
      expect(duplicate_job.should_reschedule?).to eq(false)

      duplicate_job.set_deduplicated_flag!
      expect(duplicate_job.should_reschedule?).to eq(false)

      duplicate_job.delete!
      expect(duplicate_job.should_reschedule?).to eq(false)
    end
  end
end
describe '#duplicate?' do
# Calling duplicate? before check! is a programming error and raises.
it "raises an error if the check wasn't performed" do
  expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
end
# NOTE(review): presumably check! records this job's own jid under the
# idempotency key, so a job is never its own duplicate — confirm in
# DuplicateJob#check!.
it 'returns false if the existing jid equals the job jid' do
  duplicate_job.check!

  expect(duplicate_job.duplicate?).to be(false)
end
# A different jid already holds the idempotency key, so this job IS a
# duplicate. The description previously claimed "returns false" while the
# expectation asserts true — the description is corrected to match.
it 'returns true if the existing jid is different from the job jid' do
  set_idempotency_key(idempotency_key, 'a different jid')
  duplicate_job.check!

  expect(duplicate_job.duplicate?).to be(true)
end
end
describe '#scheduled_at' do
let(:scheduled_at) { 42 }
let(:job) do
@ -592,35 +660,4 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
end
# Redis key under which the WAL location seen when the duplicate was first
# tracked is stored, namespaced per database connection.
def existing_wal_location_key(idempotency_key, connection_name)
  format('%s:%s:existing_wal_location', idempotency_key, connection_name)
end
# Redis key under which the latest WAL location for the given database
# connection is stored.
def wal_location_key(idempotency_key, connection_name)
  [idempotency_key, connection_name, 'wal_location'].join(':')
end
# Seeds Redis (via Sidekiq's pool) with +value+ (defaults to '1') under +key+.
def set_idempotency_key(key, value = '1')
  Sidekiq.redis do |redis|
    redis.set(key, value)
  end
end
# Appends the wal/offset pair to the Redis list stored at +key+.
def rpush_to_redis_key(key, wal, offset)
  Sidekiq.redis do |redis|
    redis.rpush(key, [wal, offset])
  end
end
# Fetches [value, ttl] for +key+ in one pipelined round trip.
# Redis reports TTL -1 for a key with no expiry and -2 for a missing key.
def read_idempotency_key_with_ttl(key)
  Sidekiq.redis do |redis|
    redis.pipelined do |pipeline|
      pipeline.get(key)
      pipeline.ttl(key)
    end
  end
end
# Returns every element of the Redis list stored at +key+.
def read_range_from_redis(key)
  Sidekiq.redis { |redis| redis.lrange(key, 0, -1) }
end
end

View File

@ -4481,7 +4481,7 @@ RSpec.describe Ci::Build do
describe '#collect_coverage_reports!' do
subject { build.collect_coverage_reports!(coverage_report) }
let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
let(:coverage_report) { Gitlab::Ci::Reports::CoverageReport.new }
it { expect(coverage_report.files).to eq({}) }

View File

@ -99,6 +99,17 @@ RSpec.describe PgFullTextSearchable do
it 'does not support searching by non-Latin characters' do
expect(model_class.pg_full_text_search('日本')).to be_empty
end
context 'when search term has a URL' do
  let(:with_url) { model_class.create!(project: project, title: 'issue with url', description: 'sample url,https://gitlab.com/gitlab-org/gitlab') }

  # NOTE(review): both https and gopher terms match the same stored https
  # URL, so the search appears scheme-insensitive — confirm against
  # PgFullTextSearchable's URL normalization.
  it 'allows searching by full URL, ignoring the scheme' do
    with_url.update_search_data!

    expect(model_class.pg_full_text_search('https://gitlab.com/gitlab-org/gitlab')).to contain_exactly(with_url)
    expect(model_class.pg_full_text_search('gopher://gitlab.com/gitlab-org/gitlab')).to contain_exactly(with_url)
  end
end
end
describe '#update_search_data!' do

View File

@ -17,7 +17,6 @@ RSpec.describe 'Getting starredProjects of the user' do
let_it_be(:user, reload: true) { create(:user) }
let(:user_fields) { 'starredProjects { nodes { id } }' }
let(:current_user) { nil }
let(:starred_projects) do
post_graphql(query, current_user: current_user)
@ -34,21 +33,23 @@ RSpec.describe 'Getting starredProjects of the user' do
user.toggle_star(project_c)
end
it_behaves_like 'a working graphql query' do
before do
post_graphql(query)
end
end
context 'anonymous access' do
let(:current_user) { nil }
it 'found only public project' do
expect(starred_projects).to contain_exactly(
a_graphql_entity_for(project_a)
)
it 'returns nothing' do
expect(starred_projects).to be_nil
end
end
context 'the current user is the user' do
let(:current_user) { user }
it_behaves_like 'a working graphql query' do
before do
post_graphql(query, current_user: current_user)
end
end
it 'found all projects' do
expect(starred_projects).to contain_exactly(
a_graphql_entity_for(project_a),

View File

@ -53,7 +53,7 @@ module TestEnv
'wip' => 'b9238ee',
'csv' => '3dd0896',
'v1.1.0' => 'b83d6e3',
'add-ipython-files' => 'a867a602',
'add-ipython-files' => '4963fef',
'add-pdf-file' => 'e774ebd',
'squash-large-files' => '54cec52',
'add-pdf-text-binary' => '79faa7b',

View File

@ -18,7 +18,10 @@ RSpec.shared_context 'structured_logger' do
"correlation_id" => 'cid',
"error_message" => "wrong number of arguments (2 for 3)",
"error_class" => "ArgumentError",
"error_backtrace" => []
"error_backtrace" => [],
"exception.message" => "wrong number of arguments (2 for 3)",
"exception.class" => "ArgumentError",
"exception.backtrace" => []
}
end
@ -28,7 +31,10 @@ RSpec.shared_context 'structured_logger' do
let(:clock_thread_cputime_start) { 0.222222299 }
let(:clock_thread_cputime_end) { 1.333333799 }
let(:start_payload) do
job.except('error_backtrace', 'error_class', 'error_message').merge(
job.except(
'error_message', 'error_class', 'error_backtrace',
'exception.backtrace', 'exception.class', 'exception.message'
).merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
'job_status' => 'start',
'pid' => Process.pid,
@ -68,7 +74,10 @@ RSpec.shared_context 'structured_logger' do
'job_status' => 'fail',
'error_class' => 'ArgumentError',
'error_message' => 'Something went wrong',
'error_backtrace' => be_a(Array).and(be_present)
'error_backtrace' => be_a(Array).and(be_present),
'exception.class' => 'ArgumentError',
'exception.message' => 'Something went wrong',
'exception.backtrace' => be_a(Array).and(be_present)
)
end

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true
# Shared examples for Gitlab::Redis classes backed by a multi store.
# `subject` must respond to `.with` yielding a redis instance that exposes
# the `use_primary_and_secondary_stores?` / `use_primary_store_as_default?`
# predicates; the two per-store feature-flag names are passed in by the
# including spec. Each flag is stubbed on and off and the matching
# predicate is asserted to mirror the flag state.
RSpec.shared_examples 'multi store feature flags' do |use_primary_and_secondary_stores, use_primary_store_as_default|
  context "with feature flag :#{use_primary_and_secondary_stores} is enabled" do
    before do
      stub_feature_flags(use_primary_and_secondary_stores => true)
    end

    it 'multi store is enabled' do
      subject.with do |redis_instance|
        expect(redis_instance.use_primary_and_secondary_stores?).to be true
      end
    end
  end

  context "with feature flag :#{use_primary_and_secondary_stores} is disabled" do
    before do
      stub_feature_flags(use_primary_and_secondary_stores => false)
    end

    it 'multi store is disabled' do
      subject.with do |redis_instance|
        expect(redis_instance.use_primary_and_secondary_stores?).to be false
      end
    end
  end

  context "with feature flag :#{use_primary_store_as_default} is enabled" do
    before do
      stub_feature_flags(use_primary_store_as_default => true)
    end

    it 'primary store is enabled' do
      subject.with do |redis_instance|
        expect(redis_instance.use_primary_store_as_default?).to be true
      end
    end
  end

  context "with feature flag :#{use_primary_store_as_default} is disabled" do
    before do
      stub_feature_flags(use_primary_store_as_default => false)
    end

    it 'primary store is disabled' do
      subject.with do |redis_instance|
        expect(redis_instance.use_primary_store_as_default?).to be false
      end
    end
  end
end

View File

@ -101,14 +101,15 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'Rakefile' | [:backend]
'FOO_VERSION' | [:backend]
'lib/scripts/bar.rb' | [:backend, :tooling]
'lib/scripts/bar.js' | [:frontend, :tooling]
'scripts/glfm/bar.rb' | [:backend]
'scripts/glfm/bar.js' | [:frontend]
'scripts/lib/glfm/bar.rb' | [:backend]
'scripts/lib/glfm/bar.js' | [:frontend]
'scripts/bar.rb' | [:backend, :tooling]
'scripts/bar.js' | [:frontend, :tooling]
'lib/scripts/subdir/bar.rb' | [:backend, :tooling]
'lib/scripts/subdir/bar.js' | [:frontend, :tooling]
'scripts/subdir/bar.rb' | [:backend, :tooling]
'scripts/subdir/bar.js' | [:frontend, :tooling]
'scripts/foo' | [:tooling]
'Dangerfile' | [:tooling]
'danger/bundle_size/Dangerfile' | [:tooling]
@ -118,7 +119,6 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'.gitlab-ci.yml' | [:tooling]
'.gitlab/ci/cng.gitlab-ci.yml' | [:tooling]
'.gitlab/ci/ee-specific-checks.gitlab-ci.yml' | [:tooling]
'scripts/foo' | [:tooling]
'tooling/danger/foo' | [:tooling]
'ee/tooling/danger/foo' | [:tooling]
'lefthook.yml' | [:tooling]

View File

@ -101,9 +101,13 @@ module Tooling
%r{\A\.editorconfig\z} => :tooling,
%r{Dangerfile\z} => :tooling,
%r{\A((ee|jh)/)?(danger/|tooling/danger/)} => :tooling,
%r{\A((ee|jh)/)?(lib/)?scripts/.*\.rb} => [:backend, :tooling],
%r{\A((ee|jh)/)?(lib/)?scripts/.*\.js} => [:frontend, :tooling],
%r{\A((ee|jh)/)?scripts/(lib/)?glfm/.*\.rb} => [:backend],
%r{\A((ee|jh)/)?scripts/(lib/)?glfm/.*\.js} => [:frontend],
%r{\A((ee|jh)/)?scripts/.*\.rb} => [:backend, :tooling],
%r{\A((ee|jh)/)?scripts/.*\.js} => [:frontend, :tooling],
%r{\A((ee|jh)/)?scripts/} => :tooling,
%r{\Atooling/} => :tooling,
%r{(CODEOWNERS)} => :tooling,
%r{(tests.yml)} => :tooling,