Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 63fde89de0
commit e653e9a1c3
@@ -1 +1 @@
02a0a2dc23f5e9503ad06ce58c6b0f22a60487fc
0105df4bc690f9a56806eb72ebec59b1c5396d6d

@@ -1 +1 @@
188b1e18a471cef37ddd81fdf0c643c0e1c235da
6a50d274511d46d6baa32c9ef7654b6432e3e611

@@ -1,160 +0,0 @@
<script>
import {
  GlIcon,
  GlDaterangePicker,
  GlFilteredSearchToken,
  GlFilteredSearchSuggestion,
  GlOutsideDirective,
} from '@gitlab/ui';
import { __ } from '~/locale';
import { formatDate } from '~/lib/utils/datetime/date_format_utility';

const CUSTOM_DATE_FILTER_TYPE = 'custom-date';

export default {
  directives: { Outside: GlOutsideDirective },
  components: {
    GlIcon,
    GlDaterangePicker,
    GlFilteredSearchToken,
    GlFilteredSearchSuggestion,
  },
  props: {
    active: {
      type: Boolean,
      required: true,
    },
    value: {
      type: Object,
      required: true,
    },
    config: {
      type: Object,
      required: true,
    },
  },
  data() {
    return {
      datePickerShown: false,
    };
  },
  computed: {
    isActive() {
      return this.datePickerShown || this.active;
    },
    computedValue() {
      if (this.datePickerShown) {
        return {
          ...this.value,
          data: '',
        };
      }
      return this.value;
    },
    dataSegmentInputAttributes() {
      const id = 'time_range_data_segment_input';
      if (this.datePickerShown) {
        return {
          id,
          placeholder: 'YYYY-MM-DD - YYYY-MM-DD', // eslint-disable-line @gitlab/require-i18n-strings
          style: 'padding-left: 23px;',
        };
      }
      return {
        id,
      };
    },
    computedConfig() {
      return {
        ...this.config,
        options: undefined, // remove options from config to avoid default options being rendered
      };
    },
    suggestions() {
      const suggestions = this.config.options.map((option) => ({
        value: option.value,
        text: option.title,
      }));
      suggestions.push({ value: CUSTOM_DATE_FILTER_TYPE, text: __('Custom') });
      return suggestions;
    },
    defaultStartDate() {
      return new Date();
    },
  },
  methods: {
    hideDatePicker() {
      this.datePickerShown = false;
    },
    showDatePicker() {
      this.datePickerShown = true;
    },
    handleClickOutside() {
      this.hideDatePicker();
    },
    handleComplete(value) {
      if (value === CUSTOM_DATE_FILTER_TYPE) {
        this.showDatePicker();
      }
    },
    selectValue(inputValue, submitValue) {
      let value = inputValue;
      if (typeof inputValue === 'object' && inputValue.startDate && inputValue.endDate) {
        const { startDate, endDate } = inputValue;
        const format = 'yyyy-mm-dd';
        value = `${formatDate(startDate, format)} - ${formatDate(endDate, format)}`;
      }
      submitValue(value);
      this.hideDatePicker();
    },
  },
  CUSTOM_DATE_FILTER_TYPE: 'custom-date',
};
</script>

<template>
  <gl-filtered-search-token
    :data-segment-input-attributes="dataSegmentInputAttributes"
    v-bind="{ ...$props, ...$attrs }"
    :view-only="datePickerShown"
    :active="isActive"
    :value="computedValue"
    :config="computedConfig"
    v-on="$listeners"
    @complete="handleComplete"
  >
    <template #before-data-segment-input="{ submitValue }">
      <gl-icon
        v-if="datePickerShown"
        class="gl-text-gray-500"
        name="calendar"
        style="margin-left: 5px; margin-right: -15px; z-index: 1; pointer-events: none"
      />
      <div
        v-if="datePickerShown"
        v-outside="handleClickOutside"
        class="gl-absolute gl-top-full gl-z-1 gl-my-2 gl-rounded-base gl-border-1 gl-border-gray-200 gl-bg-white gl-p-4 gl-shadow-x0-y2-b4-s0"
      >
        <gl-daterange-picker
          :max-date-range="computedConfig.maxDateRange"
          start-opened
          :default-start-date="defaultStartDate"
          :default-max-date="defaultStartDate"
          @input="selectValue($event, submitValue)"
        />
      </div>
    </template>

    <template #suggestions>
      <div v-if="!datePickerShown">
        <gl-filtered-search-suggestion
          v-for="token in suggestions"
          :key="token.value"
          :value="token.value"
        >
          {{ token.text }}
        </gl-filtered-search-suggestion>
      </div>
    </template>
  </gl-filtered-search-token>
</template>

@@ -39,10 +39,14 @@ module WikiPages

    # This method throws an error if internal_event_name returns an unknown event name
    def increment_usage(page)
      track_event(page, internal_event_name)
    end

    def track_event(page, event_name)
      label = 'template' if page.template?

      Gitlab::InternalEvents.track_event(
        internal_event_name,
        event_name,
        user: current_user,
        project: project,
        namespace: group,

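The refactor above splits the old `increment_usage` into a generic `track_event(page, event_name)` helper so that, besides the existing `internal_event_name` event, an additional event name can be emitted for the same page. A minimal sketch of how an EE override could reuse that helper to fire the group-specific events defined later in this commit — the method body and the `group_internal_event_name` helper are hypothetical, not part of the diff:

```ruby
# Hypothetical sketch only: reuses the extracted track_event(page, event_name)
# helper to also emit a group wiki event such as 'create_group_wiki_page'.
def increment_usage(page)
  super
  # group_internal_event_name is an assumed helper returning the group event name.
  track_event(page, group_internal_event_name) if group
end
```
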
@@ -32,3 +32,5 @@ module WikiPages
    end
  end
end

WikiPages::CreateService.prepend_mod_with('WikiPages::CreateService')

@@ -29,3 +29,5 @@ module WikiPages
    end
  end
end

WikiPages::DestroyService.prepend_mod_with('WikiPages::DestroyService')

@@ -41,3 +41,5 @@ module WikiPages
    end
  end
end

WikiPages::UpdateService.prepend_mod_with('WikiPages::UpdateService')

@@ -10,6 +10,7 @@ module BulkImports
    data_consistency :always # rubocop:disable SidekiqLoadBalancing/WorkerDataConsistency
    feature_category :importers
    sidekiq_options dead: false, retry: 6
    sidekiq_options max_retries_after_interruption: 20
    worker_has_external_dependencies!
    worker_resource_boundary :memory
    idempotent!

@@ -15,6 +15,7 @@ module BulkImports
    data_consistency :always
    feature_category :importers
    sidekiq_options dead: false, retry: 6
    sidekiq_options max_retries_after_interruption: 20
    worker_has_external_dependencies!
    deduplicate :until_executing
    worker_resource_boundary :memory

@@ -9,6 +9,7 @@ module Import
    data_consistency :delayed
    feature_category :importers
    sidekiq_options retry: 6, dead: false
    sidekiq_options max_retries_after_interruption: 20
    worker_has_external_dependencies!

    PERFORM_DELAY = 1.minute

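Each of the three import-related workers above gains the same line, `sidekiq_options max_retries_after_interruption: 20`. As a hedged reading: this option, used with GitLab's Sidekiq reliable-fetch setup, bounds how many times a job that was interrupted mid-flight (for example, during a deployment) is re-enqueued, independently of the ordinary `retry` count. A minimal sketch of a worker carrying the same options — the class name and the stated semantics are illustrative assumptions, not part of the diff:

```ruby
# Illustrative worker only; mirrors the options added in this commit.
class ExampleImportWorker # hypothetical class name
  include ApplicationWorker

  data_consistency :delayed
  feature_category :importers
  sidekiq_options retry: 6, dead: false
  # Assumed semantics: cap re-enqueues after process interruption at 20.
  sidekiq_options max_retries_after_interruption: 20
  worker_has_external_dependencies!
  idempotent!

  def perform(project_id)
    # long-running import work that may be interrupted and resumed
  end
end
```
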
@@ -0,0 +1,20 @@
---
description: Wiki page in a group wiki is created
internal_events: true
action: create_group_wiki_page
identifiers:
- namespace
- user
additional_properties:
  label:
    description: "Is 'template' when the page is a template"
  property:
    description: "Name of the markup language used by the page"
product_group: knowledge
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
distributions:
- ee
tiers:
- premium
- ultimate

@@ -0,0 +1,20 @@
---
description: Wiki page in a group wiki is deleted
internal_events: true
action: delete_group_wiki_page
identifiers:
- namespace
- user
additional_properties:
  label:
    description: "Is 'template' when the page is a template"
  property:
    description: "Name of the markup language used by the page"
product_group: knowledge
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
distributions:
- ee
tiers:
- premium
- ultimate

@@ -0,0 +1,20 @@
---
description: Wiki page in a group wiki is updated
internal_events: true
action: update_group_wiki_page
identifiers:
- namespace
- user
additional_properties:
  label:
    description: "Is 'template' when the page is a template"
  property:
    description: "Name of the markup language used by the page"
product_group: knowledge
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
distributions:
- ee
tiers:
- premium
- ultimate

@@ -0,0 +1,20 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_create_group_wiki_page_monthly
description: Monthly count of unique users who created a wiki page in group wiki
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: create_group_wiki_page
  unique: user.id

@@ -0,0 +1,20 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_delete_group_wiki_page_monthly
description: Monthly count of unique users who deleted a wiki page in group wiki
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: delete_group_wiki_page
  unique: user.id

@@ -0,0 +1,20 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_update_group_wiki_page_monthly
description: Monthly count of unique users who updated a wiki page in group wiki
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: update_group_wiki_page
  unique: user.id

@@ -0,0 +1,19 @@
---
key_path: counts.count_total_create_group_wiki_page_monthly
description: Monthly count of wiki pages created in group wikis
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: create_group_wiki_page

@@ -0,0 +1,19 @@
---
key_path: counts.count_total_delete_group_wiki_page_monthly
description: Monthly count of wiki pages deleted in group wikis
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: delete_group_wiki_page

@@ -0,0 +1,19 @@
---
key_path: counts.count_total_update_group_wiki_page_monthly
description: Monthly count of wiki pages updated in group wikis
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: update_group_wiki_page

@@ -0,0 +1,20 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_create_group_wiki_page_weekly
description: Weekly count of unique users who created a wiki page in group wiki
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: create_group_wiki_page
  unique: user.id

@@ -0,0 +1,20 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_delete_group_wiki_page_weekly
description: Weekly count of unique users who deleted a wiki page in group wiki
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: delete_group_wiki_page
  unique: user.id

@@ -0,0 +1,20 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_update_group_wiki_page_weekly
description: Weekly count of unique users who updated a wiki page in group wiki
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: update_group_wiki_page
  unique: user.id

@@ -0,0 +1,19 @@
---
key_path: counts.count_total_create_group_wiki_page_weekly
description: Weekly count of wiki pages created in group wikis
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: create_group_wiki_page

@@ -0,0 +1,19 @@
---
key_path: counts.count_total_delete_group_wiki_page_weekly
description: Weekly count of wiki pages deleted in group wikis
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: delete_group_wiki_page

@@ -0,0 +1,19 @@
---
key_path: counts.count_total_update_group_wiki_page_weekly
description: Weekly count of wiki pages updated in group wikis
product_group: knowledge
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167091
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ee
tier:
- premium
- ultimate
events:
- name: update_group_wiki_page

@@ -0,0 +1,8 @@
---
migration_job_name: FixProjectSettingsHasVulnerabilities
description: Fix the project_settings#has_vulnerabilities column data inconsistencies
feature_category: vulnerability_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167933
milestone: '17.5'
queued_migration_version: 20241002155253
finalized_by: # version of the migration that finalized this BBM

@@ -0,0 +1,28 @@
# frozen_string_literal: true

class QueueFixProjectSettingsHasVulnerabilities < Gitlab::Database::Migration[2.2]
  milestone '17.5'

  restrict_gitlab_migration gitlab_schema: :gitlab_sec

  MIGRATION = "FixProjectSettingsHasVulnerabilities"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    queue_batched_background_migration(
      MIGRATION,
      :vulnerability_reads,
      :project_id,
      batch_class_name: 'LooseIndexScanBatchingStrategy',
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(MIGRATION, :vulnerability_reads, :project_id, [])
  end
end

@@ -0,0 +1 @@
ab1427627730b92e71f06a9cfe4c2615b61bc648f6b86e814c92e29572f95905

@@ -69,25 +69,29 @@ Enable the Advanced SAST analyzer to discover vulnerabilities in your applicatio
cross-function and cross-file taint analysis. You can then adjust its behavior by using CI/CD
variables.

### Enabling the analyzer
### Requirements

Prerequisites:
Like other GitLab SAST analyzers, the Advanced SAST analyzer requires a runner and a CI/CD pipeline; see [SAST requirements](index.md#requirements) for details.

- GitLab version 17.1 or later, if you are running a self-managed instance. (GitLab.com is ready to use.)
- The `.gitlab-ci.yml` file must include:
  - The `test` stage.
On GitLab self-managed, you must also use a GitLab version that supports Advanced SAST:

To enable the Advanced SAST analyzer:
- You should use GitLab 17.4 or later if possible. GitLab 17.4 includes a new code-flow view, vulnerability deduplication, and further updates to the SAST CI/CD template.
- The [SAST CI/CD templates](index.md#stable-vs-latest-sast-templates) were updated to include Advanced SAST in the following releases:
  - The stable template includes Advanced SAST in GitLab 17.3 or later.
  - The latest template includes Advanced SAST in GitLab 17.2 or later. Note that you [should not mix latest and stable templates](../index.md#template-editions) in a single project.
- At a minimum, GitLab Advanced SAST requires version 17.1 or later.

1. On the left sidebar, select **Search or go to** and find your project.
1. Select **Build > Pipeline editor**.
1. If no `.gitlab-ci.yml` file exists, select **Configure pipeline**, then delete the example
   content.
1. Include a SAST template (if not already done), either `Jobs/SAST.gitlab-ci.yml` or `Jobs/SAST.latest.gitlab-ci.yml`.
   **Note:** The `latest` templates can receive breaking changes in any release.
1. Set the CI/CD variable `GITLAB_ADVANCED_SAST_ENABLED` to `true`.
### Enabling Advanced SAST scanning

Here is a minimal YAML file for enabling GitLab Advanced SAST:
Advanced SAST is included in the standard GitLab SAST CI/CD template, but isn't yet enabled by default.
To enable it, set the CI/CD variable `GITLAB_ADVANCED_SAST_ENABLED` to `true`.
You can set this variable in different ways depending on how you manage your CI/CD configuration.

#### Edit the CI/CD pipeline definition manually

If you've already enabled GitLab SAST scanning in your project, add a new CI/CD variable to enable GitLab Advanced SAST.

This minimal YAML file includes the [stable SAST template](index.md#stable-vs-latest-sast-templates) and enables Advanced SAST:

```yaml
include:

@@ -97,6 +101,29 @@ variables:
  GITLAB_ADVANCED_SAST_ENABLED: 'true'
```

#### Enforce it in a Scan Execution Policy

To enable Advanced SAST in a [Scan Execution Policy](../policies/scan_execution_policies.md), update your policy's scan action to set the CI/CD variable `GITLAB_ADVANCED_SAST_ENABLED` to `true`.
You can set this variable by:

- Selecting it from the menu in the [policy editor](../policies/scan_execution_policies.md#scan-execution-policy-editor).
- Adding it to the [`variables` object](../policies/scan_execution_policies.md#scan-action-type) in the scan action.

#### By using the pipeline editor

To enable Advanced SAST by using the pipeline editor:

1. In your project, select **Build > Pipeline editor**.
1. If no `.gitlab-ci.yml` file exists, select **Configure pipeline**, then delete the example
   content.
1. Update the CI/CD configuration to:
   - Include one of the GitLab-managed [SAST CI/CD templates](index.md#stable-vs-latest-sast-templates) if it is not [already included](index.md#configure-sast-in-your-cicd-yaml).
     - In GitLab 17.3 or later, you should use the stable template, `Jobs/SAST.gitlab-ci.yml`.
     - In GitLab 17.2, Advanced SAST is only available in the latest template, `Jobs/SAST.latest.gitlab-ci.yml`. Note that you [should not mix latest and stable templates](../index.md#template-editions) in a single project.
     - In GitLab 17.1, you must manually copy the contents of the Advanced SAST job into your CI/CD pipeline definition.
   - Set the CI/CD variable `GITLAB_ADVANCED_SAST_ENABLED` to `true`.

   See the [minimal YAML example above](#edit-the-cicd-pipeline-definition-manually).
1. Select the **Validate** tab, then select **Validate pipeline**.

   The message **Simulation completed successfully** confirms the file is valid.

@@ -108,7 +135,7 @@ variables:
   merge request**.
1. Review and edit the merge request according to your standard workflow, then select **Merge**.

Pipelines now include an advanced SAST job.
Pipelines now include an Advanced SAST job.

## Troubleshooting

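For reference, the minimal configuration the updated documentation describes comes down to including a GitLab-managed SAST template and setting one variable. The sketch below uses the stable template named in the doc; treat it as an illustration of those steps, not the exact snippet from the merge request (whose YAML block is truncated in this diff):

```yaml
# Minimal sketch: include the stable SAST template and turn on Advanced SAST.
include:
  - template: Jobs/SAST.gitlab-ci.yml

variables:
  GITLAB_ADVANCED_SAST_ENABLED: 'true'
```
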
@@ -244,6 +244,21 @@ of your group. They can, therefore, view information they should not be able to
Reassigning contributions to users with administrator access is disabled by default, but you can
[enable](../../../administration/settings/import_and_export_settings.md#allow-contribution-mapping-to-administrators) it.

##### Membership security considerations

Because of the GitLab permissions model, when a group or project is imported into an existing parent group, members of
the parent group are granted [inherited membership](../members/index.md#membership-types) of the imported group or project.

Selecting a user for contribution and membership reassignment who already has an
existing inherited membership of the imported group or project can affect how memberships
are reassigned to them.

GitLab does not allow a membership in a child project or group to have a lower role
than an inherited membership. If an imported membership for an assigned user has a lower role
than their existing inherited membership, the imported membership is not reassigned to the user.

This results in their membership for the imported group or project being higher than it was on the source.

#### Request reassignment in UI

Prerequisites:

@@ -0,0 +1,14 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Fixes the project_settings#has_vulnerabilities data inconsistencies
    class FixProjectSettingsHasVulnerabilities < BatchedMigrationJob
      feature_category :vulnerability_management

      def perform; end
    end
  end
end

Gitlab::BackgroundMigration::FixProjectSettingsHasVulnerabilities.prepend_mod

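The CE class above is an intentionally empty stub: `perform` is a no-op and `prepend_mod` lets the EE edition supply the real backfill. A rough, hypothetical sketch of what such an EE override for a `BatchedMigrationJob` usually looks like — none of this code is part of the commit:

```ruby
# Hypothetical EE override, for illustration only.
module EE
  module Gitlab
    module BackgroundMigration
      module FixProjectSettingsHasVulnerabilities
        extend ::Gitlab::Utils::Override

        override :perform
        def perform
          # each_sub_batch is the standard BatchedMigrationJob iteration helper (assumed here).
          each_sub_batch do |sub_batch|
            # Recompute project_settings.has_vulnerabilities for the projects in this sub-batch.
          end
        end
      end
    end
  end
end
```
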
@@ -5,7 +5,7 @@ module Gitlab
    class SourceUserMapper
      include Gitlab::ExclusiveLeaseHelpers

      LRU_CACHE_SIZE = 8000
      LRU_CACHE_SIZE = 100
      LOCK_TTL = 15.seconds.freeze
      LOCK_SLEEP = 0.3.seconds.freeze
      LOCK_RETRIES = 100

@@ -31,7 +31,6 @@ module Gitlab
      #
      # @param [String] source_user_identifier The identifier for the source user to find.
      # @return [Import::SourceUser, nil] The found source user object, or `nil` if no match is found.
      #
      def find_source_user(source_user_identifier)
        cache_from_request_store[source_user_identifier] ||= ::Import::SourceUser.uncached do
          ::Import::SourceUser.find_source_user(

@@ -43,7 +42,8 @@ module Gitlab
        end
      end

      def find_or_create_source_user(source_name:, source_username:, source_user_identifier:)
      # Finds a source user by the provided `source_user_identifier` or creates a new one
      def find_or_create_source_user(source_name:, source_username:, source_user_identifier:, cache: true)
        source_user = find_source_user(source_user_identifier)

        return source_user if source_user

@@ -54,7 +54,9 @@ module Gitlab
          source_user_identifier: source_user_identifier
        )

        cache_from_request_store[source_user_identifier] = source_user
        cache_from_request_store[source_user_identifier] = source_user if cache

        source_user
      end

      private

@@ -51,7 +51,8 @@ module Import
          context.source_user_mapper.find_or_create_source_user(
            source_user_identifier: source_user_id,
            source_name: source_name,
            source_username: source_username
            source_username: source_username,
            cache: false
          )
        end
      end

@@ -85,11 +85,11 @@ module QA
      end

      def running?
        `docker ps -f name=#{@name}`.include?(@name)
        shell("docker ps -f name=#{@name}").include?(@name)
      end

      def read_file(file_path)
        `docker exec #{@name} /bin/cat #{file_path}`
        shell("docker exec #{@name} /bin/cat #{file_path}")
      end

      def restart

@@ -1,67 +1,179 @@
# frozen_string_literal: true

require "socket"

module QA
  module Service
    module DockerRun
      class Smocker < Base
        def initialize(name: 'smocker-server')
        DEFAULT_SERVER_PORT = 8080
        DEFAULT_CONFIG_PORT = 8081

        private_class_method :new

        class << self
          # Create new instance of smocker container with random name and ports
          #
          # @return [QA::Service::DockerRun::Smocker]
          def create
            container = new
            container.register!
            container.wait_for_running

            container
          rescue StandardError => e
            Runtime::Logger.error("Failed to start smocker container, logs:\n#{container.logs}")
            raise e
          end

          # @param wait [Integer] seconds to wait for server
          # @yieldparam [SmockerApi] the api object ready for interaction
          def init(wait: 10)
            if @container.nil?
              @container = create

              @api = Vendor::Smocker::SmockerApi.new(
                host: @container.host_name,
                public_port: @container.public_port,
                admin_port: @container.admin_port
              )
              @api.wait_for_ready(wait: wait)
            end

            yield @api
          end

          def teardown!
            @container&.remove!
            @container = nil
            @api = nil
          end
        end

        def initialize
          @image = 'thiht/smocker:0.18.5'
          @name = name
          @public_port = 8080
          @admin_port = 8081
          @name = "smocker-service-#{SecureRandom.hex(6)}"

          super()
        end

        # @param wait [Integer] seconds to wait for server
        # @yieldparam [SmockerApi] the api object ready for interaction
        def self.init(wait: 10)
          if @container.nil?
            @container = new
            @container.register!
            @container.wait_for_running

            @api = Vendor::Smocker::SmockerApi.new(
              host: @container.host_name,
              public_port: @container.public_port,
              admin_port: @container.admin_port
            )
            @api.wait_for_ready(wait: wait)
          end

          yield @api
        end

        def self.teardown!
          @container&.remove!
          @container = nil
          @api = nil
        end

        attr_reader :public_port, :admin_port

        # Wait for container to be running
        #
        # @return [void]
        def wait_for_running
          Support::Waiter.wait_until(raise_on_failure: false, reload_page: false) do
          Support::Waiter.wait_until(max_duration: 10, reload_page: false) do
            running?
          end
        end

        # Start smocker container
        #
        # @return [void]
        def register!
          command = <<~CMD.tr("\n", ' ')
            docker run -d --rm
            --network #{network}
            --name #{name}
            --publish #{public_port}:8080
            --publish #{admin_port}:8081
            #{image}
          CMD
          return if running?

          shell command
          command = %W[docker run -d --network #{network} --name #{name}]
          # when host network is used, published ports are discarded and service in container runs as if on host
          # make sure random open ports are fetched and configured for smocker server
          command.push("-e", "SMOCKER_MOCK_SERVER_LISTEN_PORT=#{host_network? ? server_port : DEFAULT_SERVER_PORT}")
          command.push("-e", "SMOCKER_CONFIG_LISTEN_PORT=#{host_network? ? config_port : DEFAULT_CONFIG_PORT}")
          command.push("--publish-all") unless host_network?
          command.push(image)

          shell command.join(" ")
        end

        # Server port
        #
        # When running in contained docker network, return internal port because service is accessed using hostname:port
        #
        # @return [Integer]
        def public_port
          @public_port ||= if host_network?
                             server_port
                           elsif docker_network?
                             DEFAULT_SERVER_PORT
                           else
                             fetch_published_port(DEFAULT_SERVER_PORT)
                           end
        end

        # Admin port
        #
        # When running in contained docker network, return internal port because service is accessed using hostname:port
        #
        # @return [Integer]
        def admin_port
          @admin_port ||= if host_network?
                            config_port
                          elsif docker_network?
                            DEFAULT_CONFIG_PORT
                          else
                            fetch_published_port(DEFAULT_CONFIG_PORT)
                          end
        end

        private

        attr_reader :name, :image

        # Random open port for server
        #
        # @return [Integer]
        def server_port
          @server_port ||= random_port
        end

        # Random open port for server configuration
        #
        # @return [Integer]
        def config_port
          @config_port ||= random_port
        end

        # Host network used?
        #
        # @return [Boolean]
        def host_network?
          network == "host"
        end

        # Running within custom docker network
        #
        # @return [Boolean]
        def docker_network?
          host_name == "#{name}.#{network}"
        end

        # Fetch published container port
        #
        # @param [Integer] container_port
        # @return [Integer]
        def fetch_published_port(container_port)
          port = published_ports.split("\n").find { |line| line.start_with?(container_port.to_s) }.split(':').last
          raise("Could not find published #{container_port} port for container #{name}") unless port

          port.to_i
        end

        # Published ports for smocker container
        #
        # @return [String]
        def published_ports
          @published_ports ||= shell("docker port #{name}").presence || raise(
            "Unable to fetch published ports for smocker container #{name}"
          )
        end

        # Random unassigned port
        #
        # @return [Integer]
        def random_port
          server = TCPServer.new('127.0.0.1', 0)
          port = server.addr[1]
          server.close
          port
        end
      end
    end
  end
end

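Usage of the refactored class, inferred from the code above rather than quoted from the commit: `Smocker.create` builds a randomly named container with random ports, while the class-level `init` memoizes the container and yields a ready `SmockerApi`:

```ruby
# Sketch based on the class above; `mocks_yaml` is a placeholder for your mock definitions.
QA::Service::DockerRun::Smocker.init(wait: 10) do |api|
  api.register(mocks_yaml)
  # ... exercise the system under test, then inspect api.history ...
end

QA::Service::DockerRun::Smocker.teardown!
```
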
@@ -2,38 +2,52 @@
module QA
  module Support
    class AuditEventStreamingService < Vendor::Smocker::SmockerApi
    class AuditEventStreamingService
      def initialize(wait: 10, reset_on_init: true)
        @mocks = File.read(EE::Runtime::Path.fixture('audit_event_streaming', 'mocks.yml'))

        # We use the time of initialization to limit the results we get from the audit events API
        @start = DateTime.now.iso8601
        @smocker_container = Service::DockerRun::Smocker.new
        @smocker_container.register!
        @smocker_container.wait_for_running

        super(
        @smocker_container = Service::DockerRun::Smocker.create
        @api = Vendor::Smocker::SmockerApi.new(
          host: @smocker_container.host_name,
          public_port: @smocker_container.public_port,
          admin_port: @smocker_container.admin_port
        )
        wait_for_ready(wait: wait)
        reset if reset_on_init
        register(mocks)
        @api.wait_for_ready(wait: wait)
        @api.reset if reset_on_init
        @api.register(mocks)
      end

      def logs
        @smocker_container.logs
      delegate :verify, to: :api

      # Fetch smocker container logs
      #
      # @return [String]
      def container_logs
        smocker_container.logs
      end

      # Reset mock definitions
      #
      # @return [void]
      def reset!
        reset
        register(mocks)
        api.reset
        api.register(mocks)
      end

      # Remove the Smocker Docker container
      #
      # @return [void]
      def teardown!
        @smocker_container.remove! if @smocker_container
        smocker_container&.remove!
      end

      # Stream destination url
      #
      # @return [String]
      def destination_url
        @logs_endpoint ||= api.url('logs')
      end

      # Wait for the mock service to receive a request with the specified event type

@@ -46,7 +60,7 @@ module QA
      # @return [Hash] the request
      def wait_for_event(event_type, entity_type, entity_path = nil, wait: 10, raise_on_failure: true)
        event = Waiter.wait_until(max_duration: wait, sleep_interval: 1, raise_on_failure: false) do
          history.find do |record|
          api.history.find do |record|
            body = record.request[:body]
            next if body.blank?

@@ -62,8 +76,8 @@ module QA

        raise Repeater::WaitExceededError,
          "An event with type '#{event_type}'#{" and entity_path '#{entity_path}'" if entity_path} was not received. " \
            "Event history: #{stringified_history}. " \
            "Audit events with entity_type '#{entity_type}': #{audit_events}"
          "Event history: #{stringified_history}. " \
          "Audit events with entity_type '#{entity_type}': #{audit_events}"
      end

      # Wait for GitLab to start streaming audit events and for the Smocker server to be ready to receive them.

@@ -80,19 +94,14 @@ module QA
        end
      rescue Repeater::WaitExceededError
        # If there is a failure this will output the logs from the smocker container (at the debug log level)
        logs
        container_logs

        raise
      end

      private

      # The configuration for the mocked requests and responses for events that will be verified in this test
      #
      # @return [String]
      def mocks
        @mocks ||= File.read(EE::Runtime::Path.fixture('audit_event_streaming', 'mocks.yml'))
      end
      attr_reader :mocks, :smocker_container, :api
    end
  end
end

@@ -1,49 +1,95 @@
# frozen_string_literal: true

# rubocop:disable RSpec/AnyInstanceOf -- allow_next_instance_of relies on gitlab/rspec
module QA
  RSpec.describe Service::DockerRun::Smocker do
    let(:name) { 'smocker-12345' }
    let(:network) { 'thenet' }
    let(:host_ip) { '1.2.3.4' }
    let(:network) { "thenet" }
    let(:image) { "thiht/smocker:0.18.5" }
    let(:server_port) { 8080 }
    let(:config_port) { 8081 }
    let(:port_args) { "-e SMOCKER_MOCK_SERVER_LISTEN_PORT=#{server_port} -e SMOCKER_CONFIG_LISTEN_PORT=#{config_port}" }
    let(:base_cmd_pattern) { /docker run -d --network #{network} --name smocker-service-\w+ #{port_args}/ }

    subject(:smocker_container) { described_class.new(name: name) }
    subject(:container) { described_class.create }

    before do
      # rubocop:disable RSpec/AnyInstanceOf -- allow_next_instance_of relies on gitlab/rspec
      allow_any_instance_of(described_class).to receive(:shell).and_return("")
      allow_any_instance_of(described_class).to receive(:network).and_return(network)
      # rubocop:enable RSpec/AnyInstanceOf

      allow(smocker_container).to receive(:host_ip).and_return(host_ip)
      allow(Support::Waiter).to receive(:wait_until).and_return(true)
    end

    describe '#host_name' do
      shared_examples 'returns host ip' do
        it 'returns host ip' do
          expect(smocker_container.host_name).to eq(host_ip)
    describe "#create" do
      context "with successful creation" do
        it "creates new instance of smocker container" do
          expect(container).to be_instance_of(described_class)
          expect(container).to have_received(:shell).with(/#{base_cmd_pattern} --publish-all #{image}/)
          expect(container.public_port).to eq(server_port)
          expect(container.admin_port).to eq(config_port)
        end
      end

      shared_examples 'returns name.network' do
        it 'returns name.network' do
          expect(smocker_container.host_name).to eq("#{name}.#{network}")
      context "with failed creation" do
        before do
          allow(Support::Waiter).to receive(:wait_until).and_raise("error")
          allow(Runtime::Logger).to receive(:error)
          allow_any_instance_of(described_class).to receive(:shell)
            .with(/docker logs smocker-service-\w+/)
            .and_return("logs")
        end

        it "raises error" do
          expect { container }.to raise_error("error")
          expect(Runtime::Logger).to have_received(:error).with("Failed to start smocker container, logs:\nlogs")
        end
      end

      context 'when network is not bridge or host' do
        it_behaves_like 'returns name.network'
      context "with host network" do
        let(:network) { "host" }
        let(:server_port) { 55020 }
        let(:config_port) { 55021 }

        let(:server) { instance_double(TCPServer, close: nil) }

        before do
          allow(TCPServer).to receive(:new).and_return(server)
          allow(server).to receive(:addr).and_return([nil, server_port], [nil, config_port])
        end

        it "set random open ports for smocker service" do
          expect(container).to have_received(:shell).with(/#{base_cmd_pattern} #{image}/)
          expect(container.public_port).to eq(server_port)
          expect(container.admin_port).to eq(config_port)
        end
      end
    end

    describe "#init" do
      let(:api) { instance_double(Vendor::Smocker::SmockerApi, wait_for_ready: true) }

      before do
        described_class.instance_variable_set(:@container, nil)

        allow(Vendor::Smocker::SmockerApi).to receive(:new).and_return(api)
        allow(described_class).to receive(:create).and_return(container)
        allow(container).to receive(:host_name).and_return('localhost')
        allow(container).to receive(:shell).with(/docker port smocker-service-\w+/).and_return(<<~PORTS)
          8080/tcp -> 0.0.0.0:55020
          8081/tcp -> 0.0.0.0:55021
        PORTS
      end

      context 'when network is bridge' do
        let(:network) { 'bridge' }
      it "create new instance of SmockerApi" do
        described_class.init(wait: 10) { |smocker_api| expect(smocker_api).to eq(api) }

        it_behaves_like 'returns host ip'
      end

      context 'when network is host' do
        let(:network) { 'host' }

        it_behaves_like 'returns host ip'
        expect(Vendor::Smocker::SmockerApi).to have_received(:new).with(
          host: "localhost",
          public_port: 55020,
          admin_port: 55021
        )
        expect(api).to have_received(:wait_for_ready).with(wait: 10)
      end
    end
  end
end
# rubocop:enable RSpec/AnyInstanceOf

@@ -20,6 +20,8 @@ import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
import blobInfoQuery from 'shared_queries/repository/blob_info.query.graphql';
import projectInfoQuery from '~/repository/queries/project_info.query.graphql';
import highlightMixin from '~/repository/mixins/highlight_mixin';
import getRefMixin from '~/repository/mixins/get_ref';
import CodeIntelligence from '~/code_navigation/components/app.vue';
import * as urlUtility from '~/lib/utils/url_utility';
import { isLoggedIn, handleLocationHash } from '~/lib/utils/common_utils';

@@ -34,7 +36,6 @@ import {
  projectMock,
  userPermissionsMock,
  propsMock,
  refMock,
  axiosMockResponse,
} from '../mock_data';

@@ -127,7 +128,7 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
    store: createMockStore(),
    apolloProvider: fakeApollo,
    propsData: propsMock,
    mixins: [{ data: () => ({ ref: refMock }) }],
    mixins: [getRefMixin, highlightMixin],
    mocks: {
      $route: mockRoute,
      $router: mockRouter,

@@ -1,178 +0,0 @@
import {
  GlDaterangePicker,
  GlFilteredSearchSuggestion,
  GlFilteredSearchSuggestionList,
  GlFilteredSearchToken,
} from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import DaterangeToken from '~/vue_shared/components/filtered_search_bar/tokens/daterange_token.vue';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';

const CUSTOM_DATE = 'custom-date';

describe('DaterangeToken', () => {
  let wrapper;

  const defaultProps = {
    active: true,
    value: {
      data: '',
    },
    config: {
      operators: OPERATORS_IS,
      options: [
        {
          value: 'last_week',
          title: 'Last week',
        },
        {
          value: 'last_month',
          title: 'Last month',
        },
      ],
      maxDateRange: 7,
    },
  };

  function createComponent(props = {}) {
    return mountExtended(DaterangeToken, {
      propsData: { ...defaultProps, ...props },
      stubs: {
        Portal: true,
      },
      provide: {
        portalName: 'fake target',
        alignSuggestions: function fakeAlignSuggestions() {},
        suggestionsListClass: () => 'custom-class',
        termsAsTokens: () => false,
      },
    });
  }

  const findGlFilteredSearchToken = () => wrapper.findComponent(GlFilteredSearchToken);
  const findDateRangePicker = () => wrapper.findComponent(GlDaterangePicker);
  const findAllSuggestions = () => wrapper.findAllComponents(GlFilteredSearchSuggestion);
  const selectSuggestion = (suggestion) =>
    wrapper.findComponent(GlFilteredSearchSuggestionList).vm.$emit('suggestion', suggestion);

  beforeEach(() => {
    wrapper = createComponent();
  });

  it('renders a filtered search token', () => {
    expect(findGlFilteredSearchToken().exists()).toBe(true);
  });

  it('remove the options from the token config', () => {
    expect(findGlFilteredSearchToken().props('config').options).toBeUndefined();
  });

  it('does not set the token as view-only', () => {
    expect(findGlFilteredSearchToken().props('viewOnly')).toBe(false);
  });

  it('does not show the date picker by default', () => {
    expect(findDateRangePicker().exists()).toBe(false);
  });

  it('does not re-activate the token', async () => {
    await wrapper.setProps({ active: false });
    expect(findGlFilteredSearchToken().props('active')).toBe(false);
  });

  it('does not override the value', async () => {
    await wrapper.setProps({ value: { data: 'value' } });
    expect(findGlFilteredSearchToken().props('value')).toEqual({ data: 'value' });
  });

  it('renders a list of suggestions as specified by the config', () => {
    const suggestions = findAllSuggestions();
    expect(suggestions.exists()).toBe(true);
    expect(suggestions).toHaveLength(defaultProps.config.options.length + 1);
    [...defaultProps.config.options, { value: CUSTOM_DATE, title: 'Custom' }].forEach(
      (option, i) => {
        expect(suggestions.at(i).props('value')).toBe(option.value);
        expect(suggestions.at(i).text()).toBe(option.title);
      },
    );
  });

  it('sets maxDateRange on the datepicker', async () => {
    await selectSuggestion(CUSTOM_DATE);

    expect(findDateRangePicker().props('maxDateRange')).toBe(defaultProps.config.maxDateRange);
  });

  it('sets the dataSegmentInputAttributes', () => {
    expect(findGlFilteredSearchToken().props('dataSegmentInputAttributes')).toEqual({
      id: 'time_range_data_segment_input',
    });
  });

  describe('when a default option is selected', () => {
    const option = defaultProps.config.options[0].value;
    beforeEach(async () => {
      await selectSuggestion(option);
    });
    it('does not show the date picker if default option is selected', () => {
      expect(findDateRangePicker().exists()).toBe(false);
    });

    it('sets the value', () => {
      expect(findGlFilteredSearchToken().emitted().select).toEqual([[option]]);
      expect(findGlFilteredSearchToken().emitted().complete).toEqual([[option]]);
    });
  });

  describe('when custom-date option is selected', () => {
    beforeEach(async () => {
      await selectSuggestion(CUSTOM_DATE);
    });

    it('sets the token as view-only', () => {
      expect(findGlFilteredSearchToken().props('viewOnly')).toBe(true);
    });

    it('shows the date picker', () => {
      expect(findDateRangePicker().exists()).toBe(true);
      const today = new Date();
      expect(findDateRangePicker().props('defaultStartDate')).toEqual(today);
      expect(findDateRangePicker().props('defaultMaxDate')).toEqual(today);
      expect(findDateRangePicker().props('startOpened')).toBe(true);
    });

    it('re-activate the token while the date picker is open', async () => {
      await wrapper.setProps({ active: false });
      expect(findGlFilteredSearchToken().props('active')).toBe(true);
    });

    it('overrides the value', async () => {
      await wrapper.setProps({ value: { data: 'value' } });
      expect(findGlFilteredSearchToken().props('value')).toEqual({ data: '' });
    });

    it('sets the dataSegmentInputAttributes', () => {
      expect(findGlFilteredSearchToken().props('dataSegmentInputAttributes')).toEqual({
        id: 'time_range_data_segment_input',
        placeholder: 'YYYY-MM-DD - YYYY-MM-DD',
        style: 'padding-left: 23px;',
      });
    });

    it('sets the date range and hides the picker upon selection', async () => {
      await findDateRangePicker().vm.$emit('input', {
        startDate: new Date('October 13, 2014 11:13:00'),
        endDate: new Date('October 13, 2014 11:13:00'),
      });
      expect(findGlFilteredSearchToken().emitted().complete).toEqual([
        [CUSTOM_DATE],
        [`"2014-10-13 - 2014-10-13"`],
      ]);
      expect(findGlFilteredSearchToken().emitted().select).toEqual([
        [CUSTOM_DATE],
        [`"2014-10-13 - 2014-10-13"`],
      ]);
      expect(findDateRangePicker().exists()).toBe(false);
    });
  });
});

@@ -24,6 +24,7 @@ RSpec.describe Gitlab::Import::SourceUserMapper, :request_store, feature_categor
  let(:source_name) { 'Pry Contributor' }
  let(:source_username) { 'a_pry_contributor' }
  let(:source_user_identifier) { '123456' }
  let(:cache) { false }

  subject(:find_or_create_source_user) do
    described_class.new(

@@ -33,7 +34,8 @@ RSpec.describe Gitlab::Import::SourceUserMapper, :request_store, feature_categor
    ).find_or_create_source_user(
      source_name: source_name,
      source_username: source_username,
      source_user_identifier: source_user_identifier
      source_user_identifier: source_user_identifier,
      cache: cache
    )
  end

@@ -203,6 +205,26 @@ RSpec.describe Gitlab::Import::SourceUserMapper, :request_store, feature_categor
        expect { find_or_create_source_user }.to raise_error(ActiveRecord::RecordInvalid)
      end
    end

    context 'when cache is true' do
      let(:cache) { true }

      it 'caches the created source user' do
        source_user = find_or_create_source_user

        expect(Gitlab::SafeRequestStore[:source_user_cache][source_user.source_user_identifier]).to eq(source_user)
      end
    end

    context 'when cache is false' do
      let(:cache) { false }

      it 'does not cache the created source user' do
        source_user = find_or_create_source_user

        expect(Gitlab::SafeRequestStore[:source_user_cache][source_user.source_user_identifier]).to eq(nil)
      end
    end
  end

  describe '#find_source_user' do

@@ -0,0 +1,28 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueFixProjectSettingsHasVulnerabilities, feature_category: :vulnerability_management do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          gitlab_schema: :gitlab_sec,
          table_name: :vulnerability_reads,
          column_name: :project_id,
          batch_class_name: 'LooseIndexScanBatchingStrategy',
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      }
    end
  end
end

@@ -43,12 +43,27 @@ RSpec.shared_examples 'WikiPages::CreateService#execute' do |container_type|
      service.execute
    end

    it_behaves_like 'internal event tracking' do
      let(:event) { 'create_wiki_page' }
    describe 'internal event tracking' do
      let(:project) { container if container.is_a?(Project) }
      let(:namespace) { container.is_a?(Group) ? container : container.namespace }

      subject(:track_event) { service.execute }

      it_behaves_like 'internal event tracking' do
        let(:event) { 'create_wiki_page' }
      end

      context 'with group container', if: container_type == :group do
        it_behaves_like 'internal event tracking' do
          let(:event) { 'create_group_wiki_page' }
        end
      end

      context 'with project container', if: container_type == :project do
        it_behaves_like 'internal event not tracked' do
          let(:event) { 'create_group_wiki_page' }
        end
      end
    end

    context 'when the new page is a template' do

@@ -15,25 +15,36 @@ RSpec.shared_examples 'WikiPages::DestroyService#execute' do |container_type|
      service.execute(page)
    end

    it_behaves_like 'internal event tracking' do
      let(:event) { 'delete_wiki_page' }
    describe 'internal event tracking' do
      let(:project) { container if container.is_a?(Project) }
      let(:namespace) { container.is_a?(Group) ? container : container.namespace }

      subject(:track_event) { service.execute(page) }

    end

    context 'when the deleted page is a template' do
      let(:page) { create(:wiki_page, title: "#{Wiki::TEMPLATES_DIR}/foobar") }

      it_behaves_like 'internal event tracking' do
        let(:event) { 'delete_wiki_page' }
        let(:project) { container if container.is_a?(Project) }
        let(:namespace) { container.is_a?(Group) ? container : container.namespace }
        let(:label) { 'template' }
        let(:property) { 'markdown' }
      end

      subject(:track_event) { service.execute(page) }
      context 'with group container', if: container_type == :group do
        it_behaves_like 'internal event tracking' do
          let(:event) { 'delete_group_wiki_page' }
        end
      end

      context 'with project container', if: container_type == :project do
        it_behaves_like 'internal event not tracked' do
          let(:event) { 'delete_group_wiki_page' }
        end
      end

      context 'when the deleted page is a template' do
        let(:page) { create(:wiki_page, title: "#{Wiki::TEMPLATES_DIR}/foobar") }

        it_behaves_like 'internal event tracking' do
          let(:event) { 'delete_wiki_page' }
          let(:label) { 'template' }
          let(:property) { 'markdown' }
        end
      end
    end

|
@ -58,12 +58,27 @@ RSpec.shared_examples 'WikiPages::UpdateService#execute' do |container_type|
|
|||
service.execute(page)
|
||||
end
|
||||
|
||||
it_behaves_like 'internal event tracking' do
|
||||
let(:event) { 'update_wiki_page' }
|
||||
describe 'internal event tracking' do
|
||||
let(:project) { container if container.is_a?(Project) }
|
||||
let(:namespace) { container.is_a?(Group) ? container : container.namespace }
|
||||
|
||||
subject(:track_event) { service.execute(page) }
|
||||
|
||||
it_behaves_like 'internal event tracking' do
|
||||
let(:event) { 'update_wiki_page' }
|
||||
end
|
||||
|
||||
context 'with group container', if: container_type == :group do
|
||||
it_behaves_like 'internal event tracking' do
|
||||
let(:event) { 'update_group_wiki_page' }
|
||||
end
|
||||
end
|
||||
|
||||
context 'with project container', if: container_type == :project do
|
||||
it_behaves_like 'internal event not tracked' do
|
||||
let(:event) { 'update_group_wiki_page' }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the updated page is a template' do
|
||||
|
|
|
|||