Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-04-22 03:13:56 +00:00
parent 24b2c96f3b
commit c4f5b119a2
22 changed files with 935 additions and 370 deletions

View File

@ -4,7 +4,7 @@ import { __ } from '~/locale';
import PackagesSettings from '~/packages_and_registries/settings/group/components/packages_settings.vue';
import PackagesForwardingSettings from '~/packages_and_registries/settings/group/components/packages_forwarding_settings.vue';
import DependencyProxySettings from '~/packages_and_registries/settings/group/components/dependency_proxy_settings.vue';
import glAbilitiesMixin from '~/vue_shared/mixins/gl_abilities_mixin';
import getGroupPackagesSettingsQuery from '~/packages_and_registries/settings/group/graphql/queries/get_group_packages_settings.query.graphql';
export default {
@ -15,6 +15,7 @@ export default {
PackagesForwardingSettings,
DependencyProxySettings,
},
mixins: [glAbilitiesMixin()],
inject: ['groupPath'],
apollo: {
group: {
@ -88,6 +89,7 @@ export default {
/>
<dependency-proxy-settings
v-if="glAbilities.adminDependencyProxy"
id="dependency-proxy-settings"
:dependency-proxy-settings="dependencyProxySettings"
:dependency-proxy-image-ttl-policy="dependencyProxyImageTtlPolicy"

View File

@ -1,51 +1,21 @@
<script>
import { GlCollapsibleListbox, GlFormGroup } from '@gitlab/ui';
import { s__ } from '~/locale';
import { createAlert } from '~/alert';
import { getDateInPast } from '~/lib/utils/datetime_utility';
import {
DATE_RANGE_LAST_WEEK,
DATE_RANGE_LAST_30_DAYS,
DATE_RANGE_LAST_90_DAYS,
DATE_RANGE_LAST_180_DAYS,
SOURCE_PUSH,
SOURCE_SCHEDULE,
SOURCE_MERGE_REQUEST_EVENT,
SOURCE_WEB,
SOURCE_TRIGGER,
SOURCE_API,
SOURCE_EXTERNAL,
SOURCE_PIPELINE,
SOURCE_CHAT,
SOURCE_WEBIDE,
SOURCE_EXTERNAL_PULL_REQUEST_EVENT,
SOURCE_PARENT_PIPELINE,
SOURCE_ONDEMAND_DAST_SCAN,
SOURCE_ONDEMAND_DAST_VALIDATION,
SOURCE_SECURITY_ORCHESTRATION_POLICY,
SOURCE_CONTAINER_REGISTRY_PUSH,
SOURCE_DUO_WORKFLOW,
SOURCE_PIPELINE_EXECUTION_POLICY_SCHEDULE,
SOURCE_UNKNOWN,
} from '../constants';
import { SOURCE_ANY, DATE_RANGE_7_DAYS, DATE_RANGES_AS_DAYS } from '../constants';
import getPipelineAnalytics from '../graphql/queries/get_pipeline_analytics.query.graphql';
import DashboardHeader from './dashboard_header.vue';
import BranchCollapsibleListbox from './branch_collapsible_listbox.vue';
import PipelinesDashboardClickhouseFilters from './pipelines_dashboard_clickhouse_filters.vue';
import StatisticsList from './statistics_list.vue';
import PipelineDurationChart from './pipeline_duration_chart.vue';
import PipelineStatusChart from './pipeline_status_chart.vue';
const SOURCE_ANY = 'ANY';
export default {
name: 'PipelinesDashboardClickhouse',
components: {
GlCollapsibleListbox,
GlFormGroup,
DashboardHeader,
BranchCollapsibleListbox,
PipelinesDashboardClickhouseFilters,
StatisticsList,
PipelineDurationChart,
PipelineStatusChart,
@ -66,9 +36,11 @@ export default {
},
data() {
return {
source: SOURCE_ANY,
dateRange: DATE_RANGE_LAST_WEEK,
branch: this.defaultBranch,
params: {
source: SOURCE_ANY,
dateRange: DATE_RANGE_7_DAYS,
branch: this.defaultBranch,
},
pipelineAnalytics: {
aggregate: {
count: null,
@ -110,9 +82,9 @@ export default {
return {
fullPath: this.projectPath,
source: this.source === SOURCE_ANY ? null : this.source,
branch: this.branch,
fromTime: getDateInPast(today, this.dateRange),
source: this.params.source === SOURCE_ANY ? null : this.params.source,
branch: this.params.branch || null,
fromTime: getDateInPast(today, DATE_RANGES_AS_DAYS[this.params.dateRange] || 7),
toTime: today,
};
},
@ -127,46 +99,6 @@ export default {
};
},
},
pipelineSources: [
{ value: SOURCE_ANY, text: s__('PipelineSource|Any source') },
{ value: SOURCE_PUSH, text: s__('PipelineSource|Push') },
{ value: SOURCE_SCHEDULE, text: s__('PipelineSource|Schedule') },
{ value: SOURCE_MERGE_REQUEST_EVENT, text: s__('PipelineSource|Merge Request Event') },
{ value: SOURCE_WEB, text: s__('PipelineSource|Web') },
{ value: SOURCE_TRIGGER, text: s__('PipelineSource|Trigger') },
{ value: SOURCE_API, text: s__('PipelineSource|API') },
{ value: SOURCE_EXTERNAL, text: s__('PipelineSource|External') },
{ value: SOURCE_PIPELINE, text: s__('PipelineSource|Pipeline') },
{ value: SOURCE_CHAT, text: s__('PipelineSource|Chat') },
{ value: SOURCE_WEBIDE, text: s__('PipelineSource|Web IDE') },
{
value: SOURCE_EXTERNAL_PULL_REQUEST_EVENT,
text: s__('PipelineSource|External Pull Request Event'),
},
{ value: SOURCE_PARENT_PIPELINE, text: s__('PipelineSource|Parent Pipeline') },
{ value: SOURCE_ONDEMAND_DAST_SCAN, text: s__('PipelineSource|On-Demand DAST Scan') },
{
value: SOURCE_ONDEMAND_DAST_VALIDATION,
text: s__('PipelineSource|On-Demand DAST Validation'),
},
{
value: SOURCE_SECURITY_ORCHESTRATION_POLICY,
text: s__('PipelineSource|Security Orchestration Policy'),
},
{ value: SOURCE_CONTAINER_REGISTRY_PUSH, text: s__('PipelineSource|Container Registry Push') },
{ value: SOURCE_DUO_WORKFLOW, text: s__('PipelineSource|Duo Workflow') },
{
value: SOURCE_PIPELINE_EXECUTION_POLICY_SCHEDULE,
text: s__('PipelineSource|Pipeline Execution Policy Schedule'),
},
{ value: SOURCE_UNKNOWN, text: s__('PipelineSource|Unknown') },
],
dateRangeItems: [
{ value: DATE_RANGE_LAST_WEEK, text: s__('PipelineCharts|Last week') },
{ value: DATE_RANGE_LAST_30_DAYS, text: s__('PipelineCharts|Last 30 days') },
{ value: DATE_RANGE_LAST_90_DAYS, text: s__('PipelineCharts|Last 90 days') },
{ value: DATE_RANGE_LAST_180_DAYS, text: s__('PipelineCharts|Last 180 days') },
],
};
</script>
<template>
@ -174,42 +106,12 @@ export default {
<dashboard-header>
{{ s__('PipelineCharts|Pipelines') }}
</dashboard-header>
<div class="gl-mb-4 gl-flex gl-flex-wrap gl-gap-4 gl-bg-subtle gl-p-4 gl-pb-2">
<gl-form-group
class="gl-min-w-full sm:gl-min-w-20"
:label="s__('PipelineCharts|Source')"
label-for="pipeline-source"
>
<gl-collapsible-listbox
id="pipeline-source"
v-model="source"
block
:items="$options.pipelineSources"
/>
</gl-form-group>
<gl-form-group class="gl-min-w-full sm:gl-min-w-26" :label="__('Branch')" label-for="branch">
<branch-collapsible-listbox
id="branch"
v-model="branch"
block
:default-branch="defaultBranch"
:project-path="projectPath"
:project-branch-count="projectBranchCount"
/>
</gl-form-group>
<gl-form-group
class="gl-min-w-full sm:gl-min-w-15"
:label="__('Date range')"
label-for="date-range"
>
<gl-collapsible-listbox
id="date-range"
v-model="dateRange"
block
:items="$options.dateRangeItems"
/>
</gl-form-group>
</div>
<pipelines-dashboard-clickhouse-filters
v-model="params"
:default-branch="defaultBranch"
:project-path="projectPath"
:project-branch-count="projectBranchCount"
/>
<div>
<statistics-list :loading="loading" :counts="formattedCounts" />
<pipeline-duration-chart :loading="loading" :time-series="pipelineAnalytics.timeSeries" />

View File

@ -0,0 +1,171 @@
<script>
// Filter bar for the ClickHouse-backed pipelines dashboard.
// Implements v-model: accepts a `value` object of { source, dateRange, branch },
// sanitizes it on creation, and emits the complete object as an `input` event
// whenever any individual filter changes (deep watch on local `params`).
import { GlCollapsibleListbox, GlFormGroup } from '@gitlab/ui';
import { s__ } from '~/locale';
import {
  DATE_RANGE_7_DAYS,
  DATE_RANGE_30_DAYS,
  DATE_RANGE_90_DAYS,
  DATE_RANGE_180_DAYS,
  DATE_RANGE_DEFAULT,
  SOURCE_ANY,
  SOURCE_PUSH,
  SOURCE_SCHEDULE,
  SOURCE_MERGE_REQUEST_EVENT,
  SOURCE_WEB,
  SOURCE_TRIGGER,
  SOURCE_API,
  SOURCE_EXTERNAL,
  SOURCE_PIPELINE,
  SOURCE_CHAT,
  SOURCE_WEBIDE,
  SOURCE_EXTERNAL_PULL_REQUEST_EVENT,
  SOURCE_PARENT_PIPELINE,
  SOURCE_ONDEMAND_DAST_SCAN,
  SOURCE_ONDEMAND_DAST_VALIDATION,
  SOURCE_SECURITY_ORCHESTRATION_POLICY,
  SOURCE_CONTAINER_REGISTRY_PUSH,
  SOURCE_DUO_WORKFLOW,
  SOURCE_PIPELINE_EXECUTION_POLICY_SCHEDULE,
  SOURCE_UNKNOWN,
} from '../constants';
import BranchCollapsibleListbox from './branch_collapsible_listbox.vue';

// Items for the "Source" listbox. SOURCE_ANY is a sentinel meaning
// "do not filter by pipeline source".
const sourcesItems = [
  { value: SOURCE_ANY, text: s__('PipelineSource|Any source') },
  { value: SOURCE_PUSH, text: s__('PipelineSource|Push') },
  { value: SOURCE_SCHEDULE, text: s__('PipelineSource|Schedule') },
  { value: SOURCE_MERGE_REQUEST_EVENT, text: s__('PipelineSource|Merge Request Event') },
  { value: SOURCE_WEB, text: s__('PipelineSource|Web') },
  { value: SOURCE_TRIGGER, text: s__('PipelineSource|Trigger') },
  { value: SOURCE_API, text: s__('PipelineSource|API') },
  { value: SOURCE_EXTERNAL, text: s__('PipelineSource|External') },
  { value: SOURCE_PIPELINE, text: s__('PipelineSource|Pipeline') },
  { value: SOURCE_CHAT, text: s__('PipelineSource|Chat') },
  { value: SOURCE_WEBIDE, text: s__('PipelineSource|Web IDE') },
  {
    value: SOURCE_EXTERNAL_PULL_REQUEST_EVENT,
    text: s__('PipelineSource|External Pull Request Event'),
  },
  { value: SOURCE_PARENT_PIPELINE, text: s__('PipelineSource|Parent Pipeline') },
  { value: SOURCE_ONDEMAND_DAST_SCAN, text: s__('PipelineSource|On-Demand DAST Scan') },
  {
    value: SOURCE_ONDEMAND_DAST_VALIDATION,
    text: s__('PipelineSource|On-Demand DAST Validation'),
  },
  {
    value: SOURCE_SECURITY_ORCHESTRATION_POLICY,
    text: s__('PipelineSource|Security Orchestration Policy'),
  },
  { value: SOURCE_CONTAINER_REGISTRY_PUSH, text: s__('PipelineSource|Container Registry Push') },
  { value: SOURCE_DUO_WORKFLOW, text: s__('PipelineSource|Duo Workflow') },
  {
    value: SOURCE_PIPELINE_EXECUTION_POLICY_SCHEDULE,
    text: s__('PipelineSource|Pipeline Execution Policy Schedule'),
  },
  { value: SOURCE_UNKNOWN, text: s__('PipelineSource|Unknown') },
];

// Items for the "Date range" listbox (values are the '7d'/'30d'/… keys
// declared in ../constants).
const dateRangeItems = [
  { value: DATE_RANGE_7_DAYS, text: s__('PipelineCharts|Last week') },
  { value: DATE_RANGE_30_DAYS, text: s__('PipelineCharts|Last 30 days') },
  { value: DATE_RANGE_90_DAYS, text: s__('PipelineCharts|Last 90 days') },
  { value: DATE_RANGE_180_DAYS, text: s__('PipelineCharts|Last 180 days') },
];

export default {
  name: 'PipelinesDashboardClickhouseFilters',
  components: {
    GlCollapsibleListbox,
    GlFormGroup,
    BranchCollapsibleListbox,
  },
  props: {
    // v-model value. Unknown source/dateRange options are replaced with
    // defaults in data(); a missing branch falls back to defaultBranch.
    value: {
      type: Object,
      default: () => ({
        source: SOURCE_ANY,
        dateRange: DATE_RANGE_7_DAYS,
        branch: null,
      }),
      required: false,
    },
    defaultBranch: {
      type: String,
      required: false,
      default: null,
    },
    projectPath: {
      type: String,
      required: true,
    },
    projectBranchCount: {
      type: Number,
      required: true,
      // NOTE(review): `default` is ignored on a required prop — confirm
      // whether this prop should be optional instead.
      default: 0,
    },
  },
  data() {
    // Sanitize the incoming value once, so the listboxes never display an
    // unknown selection (e.g. from a hand-edited URL).
    const { source, branch, dateRange } = this.value;
    const isValidSource = sourcesItems.map(({ value }) => value).includes(source);
    const isValidDateRange = dateRangeItems.map(({ value }) => value).includes(dateRange);

    return {
      params: {
        source: isValidSource ? source : SOURCE_ANY,
        dateRange: isValidDateRange ? dateRange : DATE_RANGE_DEFAULT,
        branch: branch || this.defaultBranch,
      },
    };
  },
  watch: {
    // Deep-watch local params and emit the whole object, so the parent can
    // bind with v-model. Note: changes to the `value` prop after creation
    // are not synced back into `params`.
    params: {
      handler(params) {
        this.$emit('input', params);
      },
      deep: true,
    },
  },
  // Expose the static item lists to the template via $options.
  sourcesItems,
  dateRangeItems,
};
</script>
<template>
  <div class="gl-mb-4 gl-flex gl-flex-wrap gl-gap-4 gl-bg-subtle gl-p-4 gl-pb-2">
    <gl-form-group
      class="gl-min-w-full sm:gl-min-w-20"
      :label="s__('PipelineCharts|Source')"
      label-for="pipeline-source"
    >
      <gl-collapsible-listbox
        id="pipeline-source"
        v-model="params.source"
        block
        :items="$options.sourcesItems"
      />
    </gl-form-group>
    <gl-form-group class="gl-min-w-full sm:gl-min-w-26" :label="__('Branch')" label-for="branch">
      <branch-collapsible-listbox
        id="branch"
        v-model="params.branch"
        block
        :default-branch="defaultBranch"
        :project-path="projectPath"
        :project-branch-count="projectBranchCount"
      />
    </gl-form-group>
    <gl-form-group
      class="gl-min-w-full sm:gl-min-w-15"
      :label="__('Date range')"
      label-for="date-range"
    >
      <gl-collapsible-listbox
        id="date-range"
        v-model="params.dateRange"
        block
        :items="$options.dateRangeItems"
      />
    </gl-form-group>
  </div>
</template>

View File

@ -10,10 +10,18 @@ export const ONE_WEEK_AGO_DAYS = 7;
export const ONE_MONTH_AGO_DAYS = 31;
export const ONE_YEAR_AGO_DAYS = 365;
export const DATE_RANGE_LAST_WEEK = 7;
export const DATE_RANGE_LAST_30_DAYS = 30;
export const DATE_RANGE_LAST_90_DAYS = 90;
export const DATE_RANGE_LAST_180_DAYS = 180;
export const DATE_RANGE_7_DAYS = '7d';
export const DATE_RANGE_30_DAYS = '30d';
export const DATE_RANGE_90_DAYS = '90d';
export const DATE_RANGE_180_DAYS = '180d';
export const DATE_RANGES_AS_DAYS = {
[DATE_RANGE_7_DAYS]: 7,
[DATE_RANGE_30_DAYS]: 30,
[DATE_RANGE_90_DAYS]: 90,
[DATE_RANGE_180_DAYS]: 180,
};
export const DATE_RANGE_DEFAULT = DATE_RANGE_7_DAYS;
export const DEFAULT = 'default';
export const PARSE_FAILURE = 'parse_failure';
@ -26,6 +34,7 @@ export const SNOWPLOW_SCHEMA = 'iglu:com.gitlab/gitlab_service_ping/jsonschema/1
export const SNOWPLOW_DATA_SOURCE = 'redis_hll';
// CiPipelineSources values from GraphQL schema.
export const SOURCE_ANY = 'ANY'; // This is a special value, not part of CiPipelineSources.
export const SOURCE_PUSH = 'PUSH';
export const SOURCE_SCHEDULE = 'SCHEDULE';
export const SOURCE_MERGE_REQUEST_EVENT = 'MERGE_REQUEST_EVENT';

View File

@ -9,6 +9,7 @@ module Groups
before_action do
push_frontend_feature_flag(:maven_central_request_forwarding, group)
push_frontend_ability(ability: :admin_dependency_proxy, resource: group, user: current_user)
end
feature_category :package_registry

View File

@ -13,6 +13,12 @@ module Users
def pin_key
"#{SUPPORT_PIN_PREFIX}:#{@user.id}"
end
def pin_exists?
Gitlab::Redis::Cache.with do |redis|
redis.exists(pin_key).to_i > 0
end
end
end
end
end

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true

module Users
  module SupportPin
    # Revokes (immediately expires) a user's support PIN stored in Redis.
    #
    # Result hashes returned by #execute:
    #   { status: :not_found, message: ... } - no active PIN for the user
    #   { status: :success }                 - the PIN was expired
    #   { status: :error, message: ... }     - Redis reported a failure
    class RevokeService < SupportPin::BaseService
      def execute
        unless pin_exists?
          return { status: :not_found, message: 'Support PIN not found or already expired' }
        end

        return { status: :success } if revoke_pin

        { status: :error, message: 'Failed to revoke support PIN' }
      end

      private

      # Expires the PIN key immediately; truthy return indicates Redis
      # accepted the expiry.
      def revoke_pin
        Gitlab::Redis::Cache.with do |redis|
          redis.expire(pin_key, 0)
        end
      end
    end
  end
end

View File

@ -8,14 +8,6 @@ classes:
feature_categories:
- geo_replication
gitlab_schema: gitlab_main_cell
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: container_repository_id
table: container_repositories
sharding_key: project_id
belongs_to: container_repository
desired_sharding_key_migration_job_name: BackfillContainerRepositoryStatesProjectId
sharding_key:
project_id: projects
table_size: small

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

# Adds a NOT NULL constraint on container_repository_states.project_id,
# finalizing the sharding-key backfill for this table.
class AddContainerRepositoryStatesProjectIdNotNull < Gitlab::Database::Migration[2.2]
  milestone '18.0'
  # NOTE(review): presumably required because add_not_null_constraint adds the
  # check constraint NOT VALID and validates it separately — confirm against
  # the migration helper's documentation.
  disable_ddl_transaction!

  def up
    add_not_null_constraint :container_repository_states, :project_id
  end

  def down
    remove_not_null_constraint :container_repository_states, :project_id
  end
end

View File

@ -0,0 +1 @@
695dc230a83ccefe9910f9ebbb4f97e7be3c78fe0a2ed8587dfa139c7c6aa1b2

View File

@ -12905,7 +12905,8 @@ CREATE TABLE container_repository_states (
verification_checksum bytea,
verification_failure text,
project_id bigint,
CONSTRAINT check_c96417dbc5 CHECK ((char_length(verification_failure) <= 255))
CONSTRAINT check_c96417dbc5 CHECK ((char_length(verification_failure) <= 255)),
CONSTRAINT check_d65b1f0839 CHECK ((project_id IS NOT NULL))
);
CREATE TABLE content_blocked_states (

View File

@ -1584,3 +1584,48 @@ Supported attributes:
| Attribute | Type | Required | Description |
|:-----------------------|:---------|:---------|:------------|
| `id` | integer | yes | ID of user account |
## Revoke a Support PIN for a user
{{< details >}}
- Tier: Free, Premium, Ultimate
- Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated
{{< /details >}}
{{< history >}}
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/187657)
in GitLab 17.11.
{{< /history >}}
Revokes a Support PIN for the specified user before its natural expiration.
This immediately expires and removes the PIN.
Prerequisites:
- You must be an administrator.
```plaintext
POST /users/:id/support_pin/revoke
```
Example request:
```shell
curl --request POST \
--header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/users/1234/support_pin/revoke"
```
Example response:
If successful, returns `202 Accepted`.
Supported attributes:
| Attribute | Type | Required | Description |
|:-------------|:----------|:---------|:--------------------|
| `id` | integer | yes | ID of a user |

View File

@ -129,6 +129,36 @@ module API
end
end
desc 'Revoke support PIN for a user. Available only for admins.' do
detail 'This feature allows administrators to revoke the support PIN for a specified user before its natural expiration'
success code: 204
is_array false
end
params do
requires :id, type: Integer, desc: 'The ID of the user'
end
post ":id/support_pin/revoke", feature_category: :user_management do
authenticated_as_admin!
user = User.find_by_id(params[:id])
not_found!('User') unless user
begin
result = ::Users::SupportPin::RevokeService.new(user).execute
rescue StandardError
error!("Error revoking Support PIN for user.", :unprocessable_entity)
end
case result[:status]
when :success
status :accepted
when :not_found
not_found!(result[:message])
else
error!(result[:message] || "Failed to revoke Support PIN", :bad_request)
end
end
desc 'Get the list of users' do
success Entities::UserBasic
end

View File

@ -6,17 +6,17 @@ module Gitlab
class ReplaceTable
DELIMITER = ";\n\n"
attr_reader :original_table, :replacement_table, :replaced_table, :primary_key_column,
attr_reader :original_table, :replacement_table, :replaced_table, :primary_key_columns,
:sequence, :original_primary_key, :replacement_primary_key, :replaced_primary_key
def initialize(connection, original_table, replacement_table, replaced_table, primary_key_column)
def initialize(connection, original_table, replacement_table, replaced_table, primary_key_columns)
@connection = connection
@original_table = original_table
@replacement_table = replacement_table
@replaced_table = replaced_table
@primary_key_column = primary_key_column
@primary_key_columns = Array(primary_key_columns)
@sequence = default_sequence(original_table, primary_key_column)
@sequence = default_sequence(original_table, @primary_key_columns.first)
@original_primary_key = default_primary_key(original_table)
@replacement_primary_key = default_primary_key(replacement_table)
@replaced_primary_key = default_primary_key(replaced_table)
@ -48,9 +48,10 @@ module Gitlab
def combined_sql_statements
statements = []
first_pk_column = primary_key_columns.first
statements << alter_column_default(original_table, primary_key_column, expression: nil)
statements << alter_column_default(replacement_table, primary_key_column,
statements << alter_column_default(original_table, first_pk_column, expression: nil)
statements << alter_column_default(replacement_table, first_pk_column,
expression: "nextval('#{quote_table_name(sequence)}'::regclass)")
# If a different user owns the old table, the conversion process will fail to reassign the sequence
@ -60,7 +61,7 @@ module Gitlab
statements << set_table_owner_statement(original_table, table_owner(replacement_table))
end
statements << alter_sequence_owned_by(sequence, replacement_table, primary_key_column)
statements << alter_sequence_owned_by(sequence, replacement_table, first_pk_column)
rename_table_objects(statements, original_table, replaced_table, original_primary_key, replaced_primary_key)
rename_table_objects(statements, replacement_table, original_table, replacement_primary_key, original_primary_key)

View File

@ -28,11 +28,13 @@ describe('Group Settings App', () => {
const defaultProvide = {
groupPath: 'foo_group_path',
glAbilities: {},
};
const mountComponent = ({
resolver = jest.fn().mockResolvedValue(groupPackageSettingsMock),
provide = defaultProvide,
adminDependencyProxyAbility = true,
} = {}) => {
Vue.use(VueApollo);
@ -42,7 +44,12 @@ describe('Group Settings App', () => {
wrapper = shallowMount(component, {
apolloProvider,
provide,
provide: {
...provide,
glAbilities: {
adminDependencyProxy: adminDependencyProxyAbility,
},
},
mocks: {
$toast: {
show,
@ -139,4 +146,21 @@ describe('Group Settings App', () => {
});
});
});
describe('when ability adminDependencyProxy is false', () => {
beforeEach(() => {
mountComponent({
adminDependencyProxyAbility: false,
});
});
it('does not render dependency proxy settings section', () => {
expect(findDependencyProxySettings().exists()).toBe(false);
});
it('renders other settings section', () => {
expect(findPackageSettings().exists()).toBe(true);
expect(findPackageForwardingSettings().exists()).toBe(true);
});
});
});

View File

@ -0,0 +1,197 @@
// Tests for the pipelines dashboard ClickHouse filter bar: verifies listbox
// options, default/initial selections, fallback behavior for invalid values,
// and that every selection emits the complete filter object.
import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import PipelinesDashboardClickhouseFilters from '~/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_filters.vue';
import BranchCollapsibleListbox from '~/projects/pipelines/charts/components/branch_collapsible_listbox.vue';

jest.mock('~/alert');

const projectPath = 'gitlab-org/gitlab';
const defaultBranch = 'main';
const projectBranchCount = 99;

describe('PipelinesDashboardClickhouseFilters', () => {
  let wrapper;

  // Two GlCollapsibleListbox instances are rendered; select them by id.
  const findCollapsibleListbox = (id) =>
    wrapper.findAllComponents(GlCollapsibleListbox).wrappers.find((w) => w.attributes('id') === id);
  const findBranchCollapsibleListbox = () => wrapper.findComponent(BranchCollapsibleListbox);

  const createComponent = ({ props, mountFn = shallowMount } = {}) => {
    wrapper = mountFn(PipelinesDashboardClickhouseFilters, {
      propsData: {
        defaultBranch,
        projectPath,
        projectBranchCount,
        ...props,
      },
    });
  };

  describe('input', () => {
    it('does not emit immediately', () => {
      createComponent();

      expect(wrapper.emitted('input')).toBeUndefined();
    });
  });

  describe('source', () => {
    beforeEach(() => {
      createComponent();
    });

    it('shows options', () => {
      const sources = findCollapsibleListbox('pipeline-source')
        .props('items')
        .map(({ text }) => text);

      expect(sources).toEqual([
        'Any source',
        'Push',
        'Schedule',
        'Merge Request Event',
        'Web',
        'Trigger',
        'API',
        'External',
        'Pipeline',
        'Chat',
        'Web IDE',
        'External Pull Request Event',
        'Parent Pipeline',
        'On-Demand DAST Scan',
        'On-Demand DAST Validation',
        'Security Orchestration Policy',
        'Container Registry Push',
        'Duo Workflow',
        'Pipeline Execution Policy Schedule',
        'Unknown',
      ]);
    });

    it('is "Any" by default', () => {
      expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('ANY');
    });

    it('sets selected value', () => {
      createComponent({
        props: {
          value: {
            source: 'PUSH',
          },
        },
      });

      expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('PUSH');
    });

    it('does not set invalid value as selected', () => {
      createComponent({
        props: {
          value: {
            source: 'NOT_AN_OPTION',
          },
        },
      });

      expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('ANY');
    });

    it('emits when an option is selected', async () => {
      findCollapsibleListbox('pipeline-source').vm.$emit('select', 'PUSH');
      await nextTick();

      expect(wrapper.emitted('input')[0][0]).toEqual({
        branch: defaultBranch,
        dateRange: '7d',
        source: 'PUSH',
      });
    });
  });

  describe('branch', () => {
    beforeEach(() => {
      createComponent();
    });

    it('shows listbox with default branch as default value', () => {
      expect(findBranchCollapsibleListbox().props()).toMatchObject({
        selected: defaultBranch,
        defaultBranch,
        projectPath,
        projectBranchCount,
      });
    });

    it('is the default branch by default', () => {
      expect(findBranchCollapsibleListbox().props('selected')).toBe(defaultBranch);
    });

    it('sets selected value', () => {
      createComponent({
        props: {
          value: {
            branch: 'my-branch-0',
          },
        },
      });

      expect(findBranchCollapsibleListbox().props('selected')).toBe('my-branch-0');
    });

    it('emits when an option is selected', async () => {
      findBranchCollapsibleListbox().vm.$emit('select', 'my-branch-1');
      await nextTick();

      expect(wrapper.emitted('input')[0][0]).toEqual({
        branch: 'my-branch-1',
        dateRange: '7d',
        source: 'ANY',
      });
    });
  });

  describe('date range', () => {
    beforeEach(() => {
      createComponent();
    });

    it('shows options', () => {
      const ranges = findCollapsibleListbox('date-range')
        .props('items')
        .map(({ text }) => text);

      expect(ranges).toEqual(['Last week', 'Last 30 days', 'Last 90 days', 'Last 180 days']);
    });

    it('is "Last 7 days" by default', () => {
      expect(findCollapsibleListbox('date-range').props('selected')).toBe('7d');
    });

    it('does not set invalid value as selected', () => {
      // Fixed: pass an invalid dateRange (not an invalid source, which was a
      // copy-paste from the "source" describe and did not exercise the
      // date-range fallback this test asserts).
      createComponent({
        props: {
          value: {
            dateRange: 'NOT_AN_OPTION',
          },
        },
      });

      expect(findCollapsibleListbox('date-range').props('selected')).toBe('7d');
    });

    it('emits when an option is selected', async () => {
      findCollapsibleListbox('date-range').vm.$emit('select', '90d');
      await nextTick();

      expect(wrapper.emitted('input')[0][0]).toEqual({
        dateRange: '90d',
        branch: defaultBranch,
        source: 'ANY',
      });
    });
  });
});

View File

@ -1,4 +1,4 @@
import { GlCollapsibleListbox } from '@gitlab/ui';
import { GlTruncate } from '@gitlab/ui';
import { GlSingleStat } from '@gitlab/ui/dist/charts';
import { shallowMount, mount } from '@vue/test-utils';
import Vue from 'vue';
@ -6,7 +6,13 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelinesDashboardClickhouse from '~/projects/pipelines/charts/components/pipelines_dashboard_clickhouse.vue';
import BranchCollapsibleListbox from '~/projects/pipelines/charts/components/branch_collapsible_listbox.vue';
import {
SOURCE_ANY,
SOURCE_PUSH,
DATE_RANGE_7_DAYS,
DATE_RANGE_30_DAYS,
} from '~/projects/pipelines/charts/constants';
import PipelinesDashboardClickhouseFilters from '~/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_filters.vue';
import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
import PipelineDurationChart from '~/projects/pipelines/charts/components/pipeline_duration_chart.vue';
import PipelineStatusChart from '~/projects/pipelines/charts/components/pipeline_status_chart.vue';
@ -28,22 +34,25 @@ describe('PipelinesDashboardClickhouse', () => {
let wrapper;
let getPipelineAnalyticsHandler;
const findCollapsibleListbox = (id) =>
wrapper.findAllComponents(GlCollapsibleListbox).wrappers.find((w) => w.attributes('id') === id);
const findBranchCollapsibleListbox = () => wrapper.findComponent(BranchCollapsibleListbox);
const findPipelinesDashboardClickhouseFilters = () =>
wrapper.findComponent(PipelinesDashboardClickhouseFilters);
const findStatisticsList = () => wrapper.findComponent(StatisticsList);
const findPipelineDurationChart = () => wrapper.findComponent(PipelineDurationChart);
const findPipelineStatusChart = () => wrapper.findComponent(PipelineStatusChart);
const findAllSingleStats = () => wrapper.findAllComponents(GlSingleStat);
const createComponent = ({ mountFn = shallowMount } = {}) => {
const createComponent = ({ mountFn = shallowMount, ...options } = {}) => {
wrapper = mountFn(PipelinesDashboardClickhouse, {
provide: {
defaultBranch,
projectPath,
projectBranchCount,
},
stubs: {
GlTruncate,
},
apolloProvider: createMockApollo([[getPipelineAnalyticsQuery, getPipelineAnalyticsHandler]]),
...options,
});
};
@ -63,43 +72,25 @@ describe('PipelinesDashboardClickhouse', () => {
});
});
describe('source', () => {
describe('filters', () => {
beforeEach(() => {
createComponent();
});
it('shows options', () => {
const sources = findCollapsibleListbox('pipeline-source')
.props('items')
.map(({ text }) => text);
expect(sources).toEqual([
'Any source',
'Push',
'Schedule',
'Merge Request Event',
'Web',
'Trigger',
'API',
'External',
'Pipeline',
'Chat',
'Web IDE',
'External Pull Request Event',
'Parent Pipeline',
'On-Demand DAST Scan',
'On-Demand DAST Validation',
'Security Orchestration Policy',
'Container Registry Push',
'Duo Workflow',
'Pipeline Execution Policy Schedule',
'Unknown',
]);
it('sets default filters', () => {
expect(findPipelinesDashboardClickhouseFilters().props()).toEqual({
defaultBranch: 'main',
projectBranchCount: 99,
projectPath: 'gitlab-org/gitlab',
value: {
source: SOURCE_ANY,
branch: defaultBranch,
dateRange: DATE_RANGE_7_DAYS,
},
});
});
it('is "Any" by default', async () => {
expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('ANY');
it('requests with default filters', async () => {
await waitForPromises();
expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(1);
@ -112,101 +103,24 @@ describe('PipelinesDashboardClickhouse', () => {
});
});
it('is set when an option is selected', async () => {
findCollapsibleListbox('pipeline-source').vm.$emit('select', 'PUSH');
it('when an option is selected, requests with new filters', async () => {
await waitForPromises();
findPipelinesDashboardClickhouseFilters().vm.$emit('input', {
source: SOURCE_PUSH,
dateRange: DATE_RANGE_30_DAYS,
branch: 'feature-branch',
});
await waitForPromises();
expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(2);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
source: 'PUSH',
fullPath: projectPath,
branch: defaultBranch,
fromTime: new Date('2022-02-08'),
toTime: new Date('2022-02-15'),
});
});
});
describe('branch', () => {
beforeEach(async () => {
createComponent();
await waitForPromises();
});
it('shows listbox with default branch as default value', () => {
expect(findBranchCollapsibleListbox().props()).toMatchObject({
block: true,
selected: 'main',
defaultBranch: 'main',
projectPath,
projectBranchCount,
});
});
it('is set when an option is selected', async () => {
findBranchCollapsibleListbox().vm.$emit('select', 'feature-branch');
await waitForPromises();
expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(2);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
fromTime: new Date('2022-02-08'),
toTime: new Date('2022-02-15'),
source: SOURCE_PUSH,
fullPath: projectPath,
branch: 'feature-branch',
source: null,
});
});
});
describe('date range', () => {
beforeEach(async () => {
createComponent();
await waitForPromises();
});
it('shows listbox', () => {
expect(findCollapsibleListbox('date-range').props()).toMatchObject({
block: true,
selected: 7,
});
});
it('shows options', () => {
const ranges = findCollapsibleListbox('date-range')
.props('items')
.map(({ text }) => text);
expect(ranges).toEqual(['Last week', 'Last 30 days', 'Last 90 days', 'Last 180 days']);
});
it('is "Last 7 days" by default', () => {
expect(findCollapsibleListbox('date-range').props('selected')).toBe(7);
expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(1);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
fromTime: new Date('2022-02-08'),
fromTime: new Date('2022-01-16'),
toTime: new Date('2022-02-15'),
fullPath: projectPath,
branch: defaultBranch,
source: null,
});
});
it('is set when an option is selected', async () => {
findCollapsibleListbox('date-range').vm.$emit('select', 90);
await waitForPromises();
expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(2);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
fromTime: new Date('2021-11-17'),
toTime: new Date('2022-02-15'),
fullPath: projectPath,
branch: defaultBranch,
source: null,
});
});
});
@ -222,6 +136,7 @@ describe('PipelinesDashboardClickhouse', () => {
getPipelineAnalyticsHandler.mockResolvedValue(pipelineAnalyticsEmptyData);
createComponent({ mountFn: mount });
await waitForPromises();
expect(findStatisticsList().props('counts')).toEqual({

View File

@ -2,129 +2,147 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::ReplaceTable, '#perform' do
RSpec.describe Gitlab::Database::Partitioning::ReplaceTable, '#perform', feature_category: :database do
include Database::TableSchemaHelpers
subject(:replace_table) do
described_class.new(connection, original_table, replacement_table, archived_table, 'id').perform
end
let(:original_table) { '_test_original_table' }
let(:replacement_table) { '_test_replacement_table' }
let(:archived_table) { '_test_archived_table' }
let(:original_sequence) { "#{original_table}_id_seq" }
let(:original_primary_key) { "#{original_table}_pkey" }
let(:replacement_primary_key) { "#{replacement_table}_pkey" }
let(:archived_primary_key) { "#{archived_table}_pkey" }
before do
connection.execute(<<~SQL)
CREATE TABLE #{original_table} (
id serial NOT NULL PRIMARY KEY,
original_column text NOT NULL,
created_at timestamptz NOT NULL);
CREATE TABLE #{replacement_table} (
id int NOT NULL,
replacement_column text NOT NULL,
created_at timestamptz NOT NULL,
PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
SQL
end
it 'replaces the current table, archiving the old' do
expect_table_to_be_replaced { replace_table }
end
it 'transfers the primary key sequence to the replacement table' do
expect(sequence_owned_by(original_table, 'id')).to eq(original_sequence)
expect(default_expression_for(original_table, 'id')).to eq("nextval('#{original_sequence}'::regclass)")
expect(sequence_owned_by(replacement_table, 'id')).to be_nil
expect(default_expression_for(replacement_table, 'id')).to be_nil
expect_table_to_be_replaced { replace_table }
expect(sequence_owned_by(original_table, 'id')).to eq(original_sequence)
expect(default_expression_for(original_table, 'id')).to eq("nextval('#{original_sequence}'::regclass)")
expect(sequence_owned_by(archived_table, 'id')).to be_nil
expect(default_expression_for(archived_table, 'id')).to be_nil
end
it 'renames the primary key constraints to match the new table names' do
expect_primary_keys_after_tables([original_table, replacement_table])
expect_table_to_be_replaced { replace_table }
expect_primary_keys_after_tables([original_table, archived_table])
end
context 'when the table has partitions' do
before do
connection.execute(<<~SQL)
CREATE TABLE gitlab_partitions_dynamic.#{replacement_table}_202001 PARTITION OF #{replacement_table}
FOR VALUES FROM ('2020-01-01') TO ('2020-02-01');
CREATE TABLE gitlab_partitions_dynamic.#{replacement_table}_202002 PARTITION OF #{replacement_table}
FOR VALUES FROM ('2020-02-01') TO ('2020-03-01');
SQL
context 'with a composite primary key' do
subject(:replace_table) do
described_class.new(connection, original_table, replacement_table, archived_table, %w[id created_at]).perform
end
it 'renames the partitions to match the new table name' do
expect(partitions_for_parent_table(original_table).count).to eq(0)
expect(partitions_for_parent_table(replacement_table).count).to eq(2)
let(:original_table) { '_test_original_table_composite' }
let(:replacement_table) { '_test_replacement_table_composite' }
let(:archived_table) { '_test_archived_table_composite' }
expect_table_to_be_replaced { replace_table }
let(:original_sequence) { "#{original_table}_id_seq" }
expect(partitions_for_parent_table(archived_table).count).to eq(0)
partitions = partitions_for_parent_table(original_table).all
expect(partitions.size).to eq(2)
expect(partitions[0]).to have_attributes(
identifier: "gitlab_partitions_dynamic.#{original_table}_202001",
condition: "FOR VALUES FROM ('2020-01-01 00:00:00+00') TO ('2020-02-01 00:00:00+00')")
expect(partitions[1]).to have_attributes(
identifier: "gitlab_partitions_dynamic.#{original_table}_202002",
condition: "FOR VALUES FROM ('2020-02-01 00:00:00+00') TO ('2020-03-01 00:00:00+00')")
end
it 'renames the primary key constraints to match the new partition names' do
original_partitions = ["#{replacement_table}_202001", "#{replacement_table}_202002"]
expect_primary_keys_after_tables(original_partitions, schema: 'gitlab_partitions_dynamic')
expect_table_to_be_replaced { replace_table }
renamed_partitions = ["#{original_table}_202001", "#{original_table}_202002"]
expect_primary_keys_after_tables(renamed_partitions, schema: 'gitlab_partitions_dynamic')
end
end
context 'when the source table is not owned by current user' do
let(:original_table_owner) { 'random_table_owner' }
let(:original_primary_key) { "#{original_table}_pkey" }
let(:replacement_primary_key) { "#{replacement_table}_pkey" }
let(:archived_primary_key) { "#{archived_table}_pkey" }
before do
connection.execute(<<~SQL)
CREATE USER #{original_table_owner};
ALTER TABLE #{original_table} OWNER TO #{original_table_owner}
CREATE TABLE #{original_table} (
id serial NOT NULL,
original_column text NOT NULL,
created_at timestamptz NOT NULL,
PRIMARY KEY (id, created_at));
CREATE TABLE #{replacement_table} (
id int NOT NULL,
replacement_column text NOT NULL,
created_at timestamptz NOT NULL,
PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
SQL
end
it 'replaces the current table, archiving the old' do
expect_table_to_be_replaced { replace_table }
end
end
def partitions_for_parent_table(table)
Gitlab::Database::PostgresPartition.for_parent_table(table)
end
it 'transfers the primary key sequence to the replacement table' do
expect(sequence_owned_by(original_table, 'id')).to eq(original_sequence)
expect(default_expression_for(original_table, 'id')).to eq("nextval('#{original_sequence}'::regclass)")
def expect_table_to_be_replaced(&block)
super(original_table: original_table, replacement_table: replacement_table, archived_table: archived_table, &block)
expect(sequence_owned_by(replacement_table, 'id')).to be_nil
expect(default_expression_for(replacement_table, 'id')).to be_nil
expect_table_to_be_replaced { replace_table }
expect(sequence_owned_by(original_table, 'id')).to eq(original_sequence)
expect(default_expression_for(original_table, 'id')).to eq("nextval('#{original_sequence}'::regclass)")
expect(sequence_owned_by(archived_table, 'id')).to be_nil
expect(default_expression_for(archived_table, 'id')).to be_nil
end
it 'renames the primary key constraints to match the new table names' do
expect_primary_keys_after_tables([original_table, replacement_table])
expect_table_to_be_replaced { replace_table }
expect_primary_keys_after_tables([original_table, archived_table])
end
it 'does not alter the created_at column defaults' do
expect(default_expression_for(original_table, 'created_at')).to be_nil
expect(default_expression_for(replacement_table, 'created_at')).to be_nil
expect_table_to_be_replaced { replace_table }
expect(default_expression_for(original_table, 'created_at')).to be_nil
expect(default_expression_for(archived_table, 'created_at')).to be_nil
end
context 'when the table has partitions' do
before do
connection.execute(<<~SQL)
CREATE TABLE gitlab_partitions_dynamic.#{replacement_table}_202001 PARTITION OF #{replacement_table}
FOR VALUES FROM ('2020-01-01') TO ('2020-02-01');
CREATE TABLE gitlab_partitions_dynamic.#{replacement_table}_202002 PARTITION OF #{replacement_table}
FOR VALUES FROM ('2020-02-01') TO ('2020-03-01');
SQL
end
it 'renames the partitions to match the new table name' do
expect(partitions_for_parent_table(original_table).count).to eq(0)
expect(partitions_for_parent_table(replacement_table).count).to eq(2)
expect_table_to_be_replaced { replace_table }
expect(partitions_for_parent_table(archived_table).count).to eq(0)
partitions = partitions_for_parent_table(original_table).all
expect(partitions.size).to eq(2)
expect(partitions[0]).to have_attributes(
identifier: "gitlab_partitions_dynamic.#{original_table}_202001",
condition: "FOR VALUES FROM ('2020-01-01 00:00:00+00') TO ('2020-02-01 00:00:00+00')")
expect(partitions[1]).to have_attributes(
identifier: "gitlab_partitions_dynamic.#{original_table}_202002",
condition: "FOR VALUES FROM ('2020-02-01 00:00:00+00') TO ('2020-03-01 00:00:00+00')")
end
it 'renames the primary key constraints to match the new partition names' do
original_partitions = ["#{replacement_table}_202001", "#{replacement_table}_202002"]
expect_primary_keys_after_tables(original_partitions, schema: 'gitlab_partitions_dynamic')
expect_table_to_be_replaced { replace_table }
renamed_partitions = ["#{original_table}_202001", "#{original_table}_202002"]
expect_primary_keys_after_tables(renamed_partitions, schema: 'gitlab_partitions_dynamic')
end
end
context 'when the source table is not owned by current user' do
let(:original_table_owner) { 'random_table_owner' }
before do
connection.execute(<<~SQL)
CREATE USER #{original_table_owner};
ALTER TABLE #{original_table} OWNER TO #{original_table_owner}
SQL
end
it 'replaces the current table, archiving the old' do
expect_table_to_be_replaced { replace_table }
end
end
def partitions_for_parent_table(table)
Gitlab::Database::PostgresPartition.for_parent_table(table)
end
def expect_table_to_be_replaced(&block)
super(
original_table: original_table,
replacement_table: replacement_table,
archived_table: archived_table,
&block
)
end
end
end

View File

@ -3,5 +3,9 @@
require 'spec_helper'
# Feature spec verifying that the in-page settings search on the group
# settings pages finds every section and links each result to the correct
# anchor (via the shared examples below).
RSpec.describe "Search results for group settings", :js, feature_category: :global_search, type: :feature do
  before do
    # The dependency proxy section only renders when the feature is enabled in
    # the instance config, so enable it to exercise the full set of sections.
    stub_config(dependency_proxy: { enabled: true })
  end

  it_behaves_like 'all group settings sections exist and have correct anchor links'
end

View File

@ -5557,4 +5557,97 @@ RSpec.describe API::Users, :with_current_organization, :aggregate_failures, feat
end
end
end
# Admin-only endpoint that revokes a user's support PIN before its natural
# expiry. Covers the success path, the missing-PIN path, service failures
# (both raised errors and error result hashes), and authorization.
describe 'POST /users/:id/support_pin/revoke' do
  let(:path) { "/users/#{user.id}/support_pin/revoke" }

  context 'when current user is an admin' do
    # NOTE(review): no PIN is created inside this context — presumably a PIN
    # exists via setup outside this hunk; confirm, otherwise the 'when a PIN
    # exists' description does not match the actual state.
    context 'when a PIN exists' do
      it 'returns accepted status' do
        post api(path, admin, admin_mode: true)
        expect(response).to have_gitlab_http_status(:accepted)
      end

      it 'revokes the pin' do
        post api(path, admin, admin_mode: true)
        # Verify PIN is no longer accessible after revocation
        get api("/users/#{user.id}/support_pin", admin, admin_mode: true)
        expect(response).to have_gitlab_http_status(:not_found)
      end
    end

    context 'when no PIN exists' do
      it 'returns not found' do
        post api(path, admin, admin_mode: true)
        expect(response).to have_gitlab_http_status(:not_found)
        expect(json_response['message']).to include('Support PIN not found or already expired')
      end
    end

    # Unexpected exceptions from the service are mapped to 422.
    context 'when an error occurs during revocation' do
      before do
        allow_next_instance_of(Users::SupportPin::RevokeService) do |instance|
          allow(instance).to receive(:execute).and_raise(StandardError, 'Something went wrong')
        end
      end

      it 'returns unprocessable_entity' do
        post api(path, admin, admin_mode: true)
        expect(response).to have_gitlab_http_status(:unprocessable_entity)
        expect(json_response['error']).to eq('Error revoking Support PIN for user.')
      end
    end

    # A well-formed error result hash from the service is mapped to 400 and
    # its message is surfaced to the caller.
    context 'when the service returns an error status' do
      before do
        allow_next_instance_of(Users::SupportPin::RevokeService) do |instance|
          allow(instance).to receive(:execute).and_return({ status: :error, message: 'Service error' })
        end
      end

      it 'returns bad_request' do
        post api(path, admin, admin_mode: true)
        expect(response).to have_gitlab_http_status(:bad_request)
        expect(json_response['error']).to eq('Service error')
      end
    end
  end

  context 'when current user is not an admin' do
    before do
      # First authenticate as the user to create their own PIN
      post api("/user/support_pin", user)
    end

    it 'returns forbidden' do
      # Attempt to revoke as non-admin
      post api(path, user)
      expect(response).to have_gitlab_http_status(:forbidden)
    end

    it 'does not revoke the PIN' do
      # Attempt to revoke as non-admin
      post api(path, user)
      # Verify PIN still exists via API
      get api('/user/support_pin', user)
      expect(response).to have_gitlab_http_status(:ok)
    end
  end

  context 'when user is not authenticated' do
    it 'returns unauthorized' do
      post api(path)
      expect(response).to have_gitlab_http_status(:unauthorized)
    end
  end
end
end

View File

@ -0,0 +1,57 @@
# frozen_string_literal: true
require 'spec_helper'
# Request spec for the group "Packages and registries" settings page.
# Verifies authorization, that the page is gated on the packages config,
# and that the adminDependencyProxy frontend ability mirrors the
# dependency_proxy instance config.
RSpec.describe Groups::Settings::PackagesAndRegistriesController, feature_category: :package_registry do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group, :private) }

  # Overridden per-context to exercise the disabled-config paths below.
  let(:packages_enabled) { true }
  let(:dependency_proxy_enabled) { true }

  before do
    stub_config(packages: { enabled: packages_enabled })
    stub_config(dependency_proxy: { enabled: dependency_proxy_enabled })
    sign_in(user)
  end

  describe 'GET #show' do
    subject(:request) { get group_settings_packages_and_registries_path(group) }

    context 'when user is not authorized' do
      # Private group + non-member: hidden as 404, not 403.
      it_behaves_like 'returning response status', :not_found
    end

    context 'when user is authorized' do
      before_all do
        group.add_owner(user)
      end

      it_behaves_like 'returning response status', :ok
      it_behaves_like 'pushed feature flag', :maven_central_request_forwarding

      it 'pushes adminDependencyProxy: true ability to frontend' do
        request
        expect(response.body).to have_pushed_frontend_ability(adminDependencyProxy: dependency_proxy_enabled)
      end

      context 'when packages config is disabled' do
        let(:packages_enabled) { false }

        # The whole settings page is unavailable without the packages feature.
        it_behaves_like 'returning response status', :not_found
      end

      context 'when dependency proxy config is disabled' do
        let(:dependency_proxy_enabled) { false }

        # Page still renders, but the ability is pushed as false so the
        # frontend hides the dependency proxy section.
        it 'pushes adminDependencyProxy: false ability to frontend' do
          request
          expect(response.body).to have_pushed_frontend_ability(adminDependencyProxy: dependency_proxy_enabled)
        end
      end
    end
  end
end

View File

@ -0,0 +1,54 @@
# frozen_string_literal: true
require 'spec_helper'
# Unit spec for the service that revokes a user's support PIN.
# Exercises the three outcomes of #execute: success (PIN existed and was
# removed), :not_found (no PIN), and :error (backing store write failed).
RSpec.describe Users::SupportPin::RevokeService, feature_category: :user_management do
  let(:user) { create(:user) }
  let(:service) { described_class.new(user) }

  describe '#execute' do
    context 'when a PIN exists' do
      before do
        # Create a PIN using the UpdateService
        Users::SupportPin::UpdateService.new(user).execute
      end

      it 'revokes the PIN successfully' do
        # Verify PIN exists before revocation
        expect(Users::SupportPin::RetrieveService.new(user).execute).not_to be_nil

        result = service.execute
        expect(result[:status]).to eq(:success)

        # Verify PIN is no longer accessible after revocation
        expect(Users::SupportPin::RetrieveService.new(user).execute).to be_nil
      end
    end

    context 'when no PIN exists' do
      it 'returns not_found status' do
        result = service.execute
        expect(result[:status]).to eq(:not_found)
        expect(result[:message]).to eq('Support PIN not found or already expired')
      end
    end

    # Simulates the underlying store (Redis, per the context name) rejecting
    # the delete: revoke_pin returning false must surface as an error hash,
    # not raise.
    context 'when Redis operation fails' do
      before do
        # Create a PIN first
        Users::SupportPin::UpdateService.new(user).execute

        allow_next_instance_of(described_class) do |instance|
          allow(instance).to receive(:revoke_pin).and_return(false)
        end
      end

      it 'returns an error' do
        result = service.execute
        expect(result).to eq({ status: :error, message: 'Failed to revoke support PIN' })
      end
    end
  end
end