Add latest changes from gitlab-org/gitlab@master
parent 2113bb8ffe · commit d828e7941d
@@ -4,8 +4,6 @@ import VueRouter from 'vue-router';
import IssuesListApp from 'ee_else_ce/issues/list/components/issues_list_app.vue';
import { resolvers, config } from '~/graphql_shared/issuable_client';
import createDefaultClient, { createApolloClientWithCaching } from '~/lib/graphql';
import { addShortcutsExtension } from '~/behaviors/shortcuts';
import ShortcutsWorkItems from '~/behaviors/shortcuts/shortcuts_work_items';
import { parseBoolean } from '~/lib/utils/common_utils';
import DesignDetail from '~/work_items/components/design_management/design_preview/design_details.vue';
import { ROUTES } from '~/work_items/constants';
@@ -65,8 +63,6 @@ export async function mountIssuesListApp() {
    return null;
  }

  addShortcutsExtension(ShortcutsWorkItems);

  Vue.use(VueApollo);
  Vue.use(VueRouter);

@@ -21,6 +21,8 @@ import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { addHierarchyChild, setNewWorkItemCache } from '~/work_items/graphql/cache_utils';
import { findWidget } from '~/issues/list/utils';
import TitleSuggestions from '~/issues/new/components/title_suggestions.vue';
import { addShortcutsExtension } from '~/behaviors/shortcuts';
import ShortcutsWorkItems from '~/behaviors/shortcuts/shortcuts_work_items';
import {
  getDisplayReference,
  getNewWorkItemAutoSaveKey,
@@ -518,6 +520,7 @@ export default {
    document.addEventListener('keydown', this.handleKeydown);

    this.setNumberOfDiscussionsResolved();
    addShortcutsExtension(ShortcutsWorkItems);
  },
  beforeDestroy() {
    document.removeEventListener('keydown', this.handleKeydown);
@@ -907,7 +910,7 @@ export default {
      />
      <work-item-milestone
        v-if="workItemMilestone"
        class="work-item-attributes-item"
        class="js-milestone work-item-attributes-item"
        :is-group="isGroup"
        :full-path="fullPath"
        :work-item-id="workItemId"

@@ -17,6 +17,8 @@ import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { TYPENAME_GROUP } from '~/graphql_shared/constants';
import { isLoggedIn } from '~/lib/utils/common_utils';
import { WORKSPACE_PROJECT } from '~/issues/constants';
import { addShortcutsExtension } from '~/behaviors/shortcuts';
import ShortcutsWorkItems from '~/behaviors/shortcuts/shortcuts_work_items';
import {
  i18n,
  WIDGET_TYPE_ASSIGNEES,
@@ -509,6 +511,9 @@ export default {
      return this.isModal || this.isDrawer;
    },
  },
  mounted() {
    addShortcutsExtension(ShortcutsWorkItems);
  },
  methods: {
    handleWorkItemCreated() {
      this.$apollo.queries.workItem.refetch();

@@ -4,7 +4,6 @@ import { DESIGN_MARK_APP_START, DESIGN_MEASURE_BEFORE_APP } from '~/performance/
import { performanceMarkAndMeasure } from '~/performance/utils';
import { WORKSPACE_GROUP } from '~/issues/constants';
import { addShortcutsExtension } from '~/behaviors/shortcuts';
import ShortcutsWorkItems from '~/behaviors/shortcuts/shortcuts_work_items';
import ShortcutsNavigation from '~/behaviors/shortcuts/shortcuts_navigation';
import { parseBoolean } from '~/lib/utils/common_utils';
import { injectVueAppBreadcrumbs } from '~/lib/utils/breadcrumbs';
@@ -24,7 +23,6 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType, withTabs } = {}
  }

  addShortcutsExtension(ShortcutsNavigation);
  addShortcutsExtension(ShortcutsWorkItems);

  const {
    canAdminLabel,

@@ -1,6 +1,7 @@
# frozen_string_literal: true

class ImportExportUpload < ApplicationRecord
  include EachBatch
  include WithUploads

  belongs_to :project
@@ -21,6 +22,7 @@ class ImportExportUpload < ApplicationRecord

  scope :updated_before, ->(date) { where('updated_at < ?', date) }
  scope :with_export_file, -> { where.not(export_file: nil) }
  scope :with_import_file, -> { where.not(import_file: nil) }

  def retrieve_upload(_identifier, paths)
    Upload.find_by(model: self, path: paths)

@@ -30,6 +30,7 @@ module Groups
        Gitlab::Tracking.event(self.class.name, 'create', label: 'import_group_from_file')

        if valid_user_permissions? && import_file && valid_import_file? && restorers.all?(&:restore)
          remove_import_file
          notify_success

          Gitlab::Tracking.event(
@@ -47,7 +48,6 @@ module Groups

      ensure
        remove_base_tmp_dir
        remove_import_file
      end

      private

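The two hunks above move `remove_import_file` out of the `ensure` block and into the success branch, so the uploaded archive is no longer deleted when a group import fails. A simplified sketch of the resulting flow as I read it (illustrative, not code from this commit); files left behind by failed imports instead age out through the cleanup service added next:

```ruby
# Simplified reading of the new control flow (not the actual service code).
if valid_user_permissions? && import_file && valid_import_file? && restorers.all?(&:restore)
  remove_import_file # the archive is deleted only after a successful restore
  notify_success
end
# The `ensure` block still removes the tmp dir, but no longer the import file;
# stale files are later picked up by Import::ImportFileCleanupService (below).
```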
@@ -0,0 +1,19 @@
# frozen_string_literal: true

module Import
  class ImportFileCleanupService
    LAST_MODIFIED = 72.hours
    BATCH_SIZE = 100

    def execute
      ImportExportUpload
        .with_import_file
        .updated_before(LAST_MODIFIED.ago)
        .each_batch(of: BATCH_SIZE) do |batch|
          batch.each do |upload|
            ::Gitlab::Import::RemoveImportFileWorker.perform_async(upload.id)
          end
        end
    end
  end
end

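A minimal Rails-console sketch (illustrative, not part of this commit) of how the new model scopes and the service fit together; `72.hours` mirrors `LAST_MODIFIED` above.

```ruby
# Count the uploads the cleanup service would target right now.
stale = ImportExportUpload.with_import_file.updated_before(72.hours.ago)
puts stale.count

# Running the service enqueues one Gitlab::Import::RemoveImportFileWorker job
# per stale record, in batches of BATCH_SIZE.
Import::ImportFileCleanupService.new.execute
```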
@@ -636,6 +636,16 @@
  :idempotent: false
  :tags: []
  :queue_namespace: :cronjob
- :name: cronjob:import_import_file_cleanup
  :worker_name: Gitlab::Import::ImportFileCleanupWorker
  :feature_category: :importers
  :has_external_dependencies: false
  :urgency: :low
  :resource_boundary: :unknown
  :weight: 1
  :idempotent: true
  :tags: []
  :queue_namespace: :cronjob
- :name: cronjob:import_stuck_project_import_jobs
  :worker_name: Gitlab::Import::StuckProjectImportJobsWorker
  :feature_category: :importers
@@ -3820,6 +3830,16 @@
  :idempotent: true
  :tags: []
  :queue_namespace:
- :name: import_remove_import_file
  :worker_name: Gitlab::Import::RemoveImportFileWorker
  :feature_category: :importers
  :has_external_dependencies: false
  :urgency: :low
  :resource_boundary: :unknown
  :weight: 1
  :idempotent: true
  :tags: []
  :queue_namespace:
- :name: import_user_mapping_assignment_from_csv
  :worker_name: Import::UserMapping::AssignmentFromCsvWorker
  :feature_category: :importers

@@ -0,0 +1,18 @@
# frozen_string_literal: true

module Gitlab
  module Import
    class ImportFileCleanupWorker
      include ApplicationWorker
      include CronjobQueue # rubocop:disable Scalability/CronWorkerContext -- no context in this worker

      idempotent!
      feature_category :importers
      data_consistency :sticky

      def perform
        ::Import::ImportFileCleanupService.new.execute
      end
    end
  end
end

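The worker is scheduled through the `Settings.cron_jobs` entry added later in this commit, but because it is a plain `ApplicationWorker` it can also be exercised directly; a hedged console sketch:

```ruby
# Run the cleanup inline (for example from a Rails console)...
Gitlab::Import::ImportFileCleanupWorker.new.perform

# ...or enqueue it through Sidekiq, as the cron schedule normally would.
Gitlab::Import::ImportFileCleanupWorker.perform_async
```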
@@ -0,0 +1,34 @@
# frozen_string_literal: true

module Gitlab
  module Import
    class RemoveImportFileWorker
      include ApplicationWorker

      idempotent!
      feature_category :importers
      data_consistency :sticky

      def perform(upload_id)
        upload = ImportExportUpload.find_by_id(upload_id)

        return unless upload

        upload.remove_import_file!
        upload.save!

        logger.info(
          message: 'Removed ImportExportUpload import_file',
          project_id: upload.project_id,
          group_id: upload.group_id
        )
      end

      private

      def logger
        @logger ||= ::Import::Framework::Logger.build
      end
    end
  end
end

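For context, `remove_import_file!` followed by `save!` is the usual pattern for a CarrierWave-style mounted uploader; the mount itself is not shown in this diff, so treat the sketch below as an assumption about how the attachment is actually deleted:

```ruby
# Assumed effect of the worker body above (import_file assumed to be a
# CarrierWave mounted uploader on ImportExportUpload).
upload = ImportExportUpload.find_by_id(upload_id) # upload_id: placeholder
if upload
  upload.remove_import_file! # flags the mounted file for removal
  upload.save!               # persisting the record deletes the stored file
end
```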
@@ -6,6 +6,7 @@ class RepositoryImportWorker # rubocop:disable Scalability/IdempotentWorker
  data_consistency :always
  include ExceptionBacktrace
  include ProjectStartImport
  include Sidekiq::InterruptionsExhausted

  feature_category :importers
  worker_has_external_dependencies!
@@ -14,6 +15,10 @@ class RepositoryImportWorker # rubocop:disable Scalability/IdempotentWorker
  sidekiq_options status_expiration: Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION
  worker_resource_boundary :memory

  sidekiq_interruptions_exhausted do |job|
    new.perform_failure(job['args'].first)
  end

  def perform(project_id)
    Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/464677')

@@ -37,6 +42,15 @@ class RepositoryImportWorker # rubocop:disable Scalability/IdempotentWorker
      end
    end

  def perform_failure(project_id)
    @project = Project.find_by_id(project_id)
    import_export_upload = @project.import_export_uploads.find_by_user_id(project.creator.id)

    fail_import('Import process reached the maximum number of interruptions')

    ::Gitlab::Import::RemoveImportFileWorker.perform_async(import_export_upload.id)
  end

  private

  attr_reader :project

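The `sidekiq_interruptions_exhausted` hook is what connects a repeatedly interrupted import to the new file-removal worker; the spec near the end of this commit exercises it roughly as follows (sketch only, with `project` standing in for a real record):

```ruby
# project: placeholder for a Project that has an import_export_upload.
job = { 'args' => [project.id] }

RepositoryImportWorker.interruptions_exhausted_block.call(job)
# => runs new.perform_failure(project.id): marks the import as failed and
#    enqueues Gitlab::Import::RemoveImportFileWorker for the creator's upload.
```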
@@ -536,6 +536,9 @@ Settings.cron_jobs['repository_archive_cache_worker']['job_class'] = 'Repository
Settings.cron_jobs['import_export_project_cleanup_worker'] ||= {}
Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker'
Settings.cron_jobs['gitlab_import_import_file_cleanup_worker'] ||= {}
Settings.cron_jobs['gitlab_import_import_file_cleanup_worker']['cron'] ||= '30 * * * *'
Settings.cron_jobs['gitlab_import_import_file_cleanup_worker']['job_class'] = 'Gitlab::Import::ImportFileCleanupWorker'
Settings.cron_jobs['ci_archive_traces_cron_worker'] ||= {}
Settings.cron_jobs['ci_archive_traces_cron_worker']['cron'] ||= '17 * * * *'
Settings.cron_jobs['ci_archive_traces_cron_worker']['job_class'] = 'Ci::ArchiveTracesCronWorker'
@@ -1113,7 +1116,7 @@ Gitlab.ee do
  # The os/arch for which duo-workflow-executor binary is build: https://gitlab.com/gitlab-org/duo-workflow/duo-workflow-executor/-/packages/35054593
  executor_binary_urls = %w[
    linux/arm linux/amd64 linux/arm64 linux/386 linux/ppc64le darwin/arm64 darwin/amd64
    freebsd/arm freebsd/386 freebsd/amd64 windows/amd64 windows/386
    freebsd/arm freebsd/386 freebsd/amd64 windows/amd64 windows/386 windows/arm64
  ].index_with do |os_info|
    "https://gitlab.com/api/v4/projects/58711783/packages/generic/duo-workflow-executor/#{executor_version}/#{os_info.sub('/', '-')}-duo-workflow-executor.tar.gz"
  end

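A quick way to confirm the wiring above from a console (illustrative; the `||=` defaults can be overridden before this initializer runs):

```ruby
Settings.cron_jobs['gitlab_import_import_file_cleanup_worker']['cron']
# => "30 * * * *"   (every hour, at minute 30)

Settings.cron_jobs['gitlab_import_import_file_cleanup_worker']['job_class']
# => "Gitlab::Import::ImportFileCleanupWorker"
```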
@@ -481,6 +481,8 @@
  - 1
- - import_refresh_import_jid
  - 1
- - import_remove_import_file
  - 1
- - import_user_mapping_assignment_from_csv
  - 1
- - incident_management

@@ -0,0 +1,16 @@
# frozen_string_literal: true

class AddPartialIndexToImportExportUploadsOnUpdateAtAndId < Gitlab::Database::Migration[2.2]
  milestone '17.10'
  disable_ddl_transaction!

  INDEX_NAME = 'idx_import_export_uploads_updated_at_id_import_file'

  def up
    add_concurrent_index :import_export_uploads, [:updated_at, :id], where: 'import_file IS NOT NULL', name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :import_export_uploads, INDEX_NAME
  end
end

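The partial index is shaped for the cleanup query introduced in this commit (`import_file IS NOT NULL`, filtered by `updated_at`). A console sketch to sanity-check that the planner can use it (assumption: realistic data volumes, otherwise Postgres may still prefer a sequential scan):

```ruby
ImportExportUpload.with_import_file.updated_before(72.hours.ago).explain
```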
@@ -0,0 +1 @@
ce4fb636326638e05acb2a7dc0109706faa85b117232e7dd5e9f32cf07240152

@@ -31139,6 +31139,8 @@ CREATE INDEX idx_hosted_runner_usage_on_project_billing_month ON ci_gitlab_hoste

CREATE UNIQUE INDEX idx_hosted_runner_usage_unique ON ci_gitlab_hosted_runner_monthly_usages USING btree (runner_id, billing_month, root_namespace_id, project_id);

CREATE INDEX idx_import_export_uploads_updated_at_id_import_file ON import_export_uploads USING btree (updated_at, id) WHERE (import_file IS NOT NULL);

CREATE UNIQUE INDEX idx_import_placeholder_memberships_on_source_user_group_id ON import_placeholder_memberships USING btree (source_user_id, group_id);

CREATE INDEX idx_import_placeholder_memberships_on_source_user_id_and_id ON import_placeholder_memberships USING btree (source_user_id, id);

@@ -182,7 +182,7 @@ Kubernetes cluster, the restore task expects the restore directories to be
empty. However, with Docker and Kubernetes volume mounts, some system level
directories may be created at the volume roots, such as the `lost+found`
directory found in Linux operating systems. These directories are usually owned
by `root`, which can cause access permission errors since the restore Rake task
by `root`, which can cause access permission errors because the restore Rake task
runs as the `git` user. To restore a GitLab installation, users have to confirm
the restore target directories are empty.

@@ -31,9 +31,8 @@ able to accept a fingerprint. Check the version of OpenSSH on your server.

## Why use OpenSSH certificates?

By using OpenSSH certificates all the information about what user on
GitLab owns the key is encoded in the key itself, and OpenSSH itself
guarantees that users can't fake this, since they'd need to have
When you use OpenSSH certificates, information about which GitLab user owns the key is
encoded in the key itself. OpenSSH guarantees that users can't fake this because they need
access to the private CA signing key.

When correctly set up, this does away with the requirement of
@@ -55,7 +54,7 @@ TrustedUserCAKeys /etc/security/mycompany_user_ca.pub
```

Usually `TrustedUserCAKeys` would not be scoped under a `Match User git`
in such a setup, since it would also be used for system logins to
in such a setup because it would also be used for system logins to
the GitLab server itself, but your setup may vary. If the CA is only
used for GitLab consider putting this in the `Match User git` section
(described below).
@@ -87,9 +86,9 @@ own `AuthorizedPrincipalsCommand` to do that mapping instead of using
our provided default.

The important part is that the `AuthorizedPrincipalsCommand` must be
able to map from the "key ID" to a GitLab username in some way, the
default command we ship assumes there's a 1=1 mapping between the two,
since the whole point of this is to allow us to extract a GitLab
able to map from the "key ID" to a GitLab username because the
default command we ship assumes there's a 1=1 mapping between the two.
The whole point of this is to allow us to extract a GitLab
username from the key itself, instead of relying on something like the
default public key to username mapping.

@@ -174,7 +173,7 @@ By default GitLab shows a "You won't be able to pull or push
project code via SSH" warning to users who have not uploaded an SSH
key to their profile.

This is counterproductive when using SSH certificates, since users
This is counterproductive when using SSH certificates because users
aren't expected to upload their own keys.

To disable this warning globally, go to "Application settings ->

@@ -1139,7 +1139,7 @@ end

{{< alert type="note" >}}

The script calculates size based on container image layers. Since layers can be shared across multiple projects, the results are approximate but give a good indication of relative disk usage between projects.
The script calculates size based on container image layers. Because layers can be shared across multiple projects, the results are approximate but give a good indication of relative disk usage between projects.

{{< /alert >}}

@@ -1581,7 +1581,7 @@ flowchart LR
The flow described by the diagram above:

1. A user runs `docker login registry.gitlab.example` on their client. This reaches the web server (or LB) on port 443.
1. Web server connects to the Registry backend pool (by default, using port 5000). Since the user
1. Web server connects to the Registry backend pool (by default, using port 5000). Because the user
   didn't provide a valid token, the Registry returns a 401 HTTP code and the URL (`token_realm` from
   Registry configuration) where to get one. This points to the GitLab API.
1. The Docker client then connects to the GitLab API and obtains a token.

@@ -421,15 +421,15 @@ a08f14ef632e: Pushing [==================================================>] 2.04
error parsing HTTP 403 response body: unexpected end of JSON input: ""
```

This error is ambiguous, as it's not clear whether the 403 is coming from the
This error is ambiguous because it's not clear whether the 403 is coming from the
GitLab Rails application, the Docker Registry, or something else. In this
case, since we know that since the login succeeded, we probably need to look
case, because we know that the login succeeded, we probably need to look
at the communication between the client and the Registry.

The REST API between the Docker client and Registry is described
[in the Docker documentation](https://distribution.github.io/distribution/spec/api/). Usually, one would just
use Wireshark or tcpdump to capture the traffic and see where things went
wrong. However, since all communications between Docker clients and servers
wrong. However, because all communications between Docker clients and servers
are done over HTTPS, it's a bit difficult to decrypt the traffic quickly even
if you know the private key. What can we do instead?

@@ -148,7 +148,7 @@ record. For example:
| `max_replica_pools` | The maximum number of replicas each Rails process connects to. This is useful if you run a lot of Postgres replicas and a lot of Rails processes because without this limit every Rails process connects to every replica by default. The default behavior is unlimited if not set. | nil |

If `record_type` is set to `SRV`, then GitLab continues to use round-robin algorithm
and ignores the `weight` and `priority` in the record. Since `SRV` records usually
and ignores the `weight` and `priority` in the record. Because `SRV` records usually
return hostnames instead of IPs, GitLab needs to look for the IPs of returned hostnames
in the additional section of the `SRV` response. If no IP is found for a hostname, GitLab
needs to query the configured `nameserver` for `ANY` record for each such hostname looking for `A` or `AAAA`

@@ -356,7 +356,7 @@ Configure your load balancer to use the `HTTP(S)` protocol rather than `TCP`.
The load balancer will then be responsible for managing SSL certificates and
terminating SSL.

Since communication between the load balancer and GitLab will not be secure,
Because communication between the load balancer and GitLab will not be secure,
there is some additional configuration needed. See the
[proxied SSL documentation](https://docs.gitlab.com/omnibus/settings/ssl/#configure-a-reverse-proxy-or-load-balancer-ssl-termination)
for details.
@@ -368,7 +368,7 @@ The load balancers will be responsible for managing SSL certificates that
end users will see.

Traffic will also be secure between the load balancers and NGINX in this
scenario. There is no need to add configuration for proxied SSL since the
scenario. There is no need to add configuration for proxied SSL because the
connection will be secure all the way. However, configuration will need to be
added to GitLab to configure SSL certificates. See
the [HTTPS documentation](https://docs.gitlab.com/omnibus/settings/ssl/)

@@ -358,7 +358,7 @@ Configure your load balancer to use the `HTTP(S)` protocol rather than `TCP`.
The load balancer will then be responsible for managing SSL certificates and
terminating SSL.

Since communication between the load balancer and GitLab will not be secure,
Because communication between the load balancer and GitLab will not be secure,
there is some additional configuration needed. See the
[proxied SSL documentation](https://docs.gitlab.com/omnibus/settings/ssl/#configure-a-reverse-proxy-or-load-balancer-ssl-termination)
for details.
@@ -370,7 +370,7 @@ The load balancers will be responsible for managing SSL certificates that
end users will see.

Traffic will also be secure between the load balancers and NGINX in this
scenario. There is no need to add configuration for proxied SSL since the
scenario. There is no need to add configuration for proxied SSL because the
connection will be secure all the way. However, configuration will need to be
added to GitLab to configure SSL certificates. See
the [HTTPS documentation](https://docs.gitlab.com/omnibus/settings/ssl/)

@@ -254,7 +254,7 @@ Configure your load balancer to use the `HTTP(S)` protocol rather than `TCP`.
The load balancer will then be responsible for managing SSL certificates and
terminating SSL.

Since communication between the load balancer and GitLab will not be secure,
Because communication between the load balancer and GitLab will not be secure,
there is some additional configuration needed. See the
[proxied SSL documentation](https://docs.gitlab.com/omnibus/settings/ssl/#configure-a-reverse-proxy-or-load-balancer-ssl-termination)
for details.
@@ -266,7 +266,7 @@ The load balancers will be responsible for managing SSL certificates that
end users will see.

Traffic will also be secure between the load balancers and NGINX in this
scenario. There is no need to add configuration for proxied SSL since the
scenario. There is no need to add configuration for proxied SSL because the
connection will be secure all the way. However, configuration will need to be
added to GitLab to configure SSL certificates. See
the [HTTPS documentation](https://docs.gitlab.com/omnibus/settings/ssl/)

@@ -343,7 +343,7 @@ Configure your load balancer to use the `HTTP(S)` protocol rather than `TCP`.
The load balancer will then be responsible for managing SSL certificates and
terminating SSL.

Since communication between the load balancer and GitLab will not be secure,
Because communication between the load balancer and GitLab will not be secure,
there is some additional configuration needed. See the
[proxied SSL documentation](https://docs.gitlab.com/omnibus/settings/ssl/#configure-a-reverse-proxy-or-load-balancer-ssl-termination)
for details.
@@ -355,7 +355,7 @@ The load balancers will be responsible for managing SSL certificates that
end users will see.

Traffic will also be secure between the load balancers and NGINX in this
scenario. There is no need to add configuration for proxied SSL since the
scenario. There is no need to add configuration for proxied SSL because the
connection will be secure all the way. However, configuration will need to be
added to GitLab to configure SSL certificates. See
the [HTTPS documentation](https://docs.gitlab.com/omnibus/settings/ssl/)

@@ -364,7 +364,7 @@ Configure your load balancer to use the `HTTP(S)` protocol rather than `TCP`.
The load balancer will then be responsible for managing SSL certificates and
terminating SSL.

Since communication between the load balancer and GitLab will not be secure,
Because communication between the load balancer and GitLab will not be secure,
there is some additional configuration needed. See the
[proxied SSL documentation](https://docs.gitlab.com/omnibus/settings/ssl/#configure-a-reverse-proxy-or-load-balancer-ssl-termination)
for details.
@@ -376,7 +376,7 @@ The load balancers will be responsible for managing SSL certificates that
end users will see.

Traffic will also be secure between the load balancers and NGINX in this
scenario. There is no need to add configuration for proxied SSL since the
scenario. There is no need to add configuration for proxied SSL because the
connection will be secure all the way. However, configuration will need to be
added to GitLab to configure SSL certificates. See
the [HTTPS documentation](https://docs.gitlab.com/omnibus/settings/ssl/)

@@ -346,7 +346,7 @@ Configure your load balancer to use the `HTTP(S)` protocol rather than `TCP`.
The load balancer will then be responsible for managing SSL certificates and
terminating SSL.

Since communication between the load balancer and GitLab will not be secure,
Because communication between the load balancer and GitLab will not be secure,
there is some additional configuration needed. See the
[proxied SSL documentation](https://docs.gitlab.com/omnibus/settings/ssl/#configure-a-reverse-proxy-or-load-balancer-ssl-termination)
for details.
@@ -358,7 +358,7 @@ The load balancers will be responsible for managing SSL certificates that
end users will see.

Traffic will also be secure between the load balancers and NGINX in this
scenario. There is no need to add configuration for proxied SSL since the
scenario. There is no need to add configuration for proxied SSL because the
connection will be secure all the way. However, configuration will need to be
added to GitLab to configure SSL certificates. See
the [HTTPS documentation](https://docs.gitlab.com/omnibus/settings/ssl/)

@@ -117,6 +117,12 @@ Example response:

### Specify a custom email address

{{< history >}}

- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/178689) in GitLab 17.9.

{{< /history >}}

You can specify a custom email address at service account creation to receive
notifications on this service account's actions.

@@ -89,6 +89,13 @@ inside the Workload Identity Pool created in the previous step, using the follow

You must map every attribute that you want to use for permission granting. For example, if you want to map permissions in the next step based on the user's email address, you must map `attribute.user_email` to `assertion.user_email`.

{{< alert type="warning" >}}

For projects hosted on GitLab.com, GCP requires you to
[limit access to only tokens issued by your GitLab group](https://cloud.google.com/iam/docs/workload-identity-federation-with-deployment-pipelines#gitlab-saas_2).

{{< /alert >}}

## Grant permissions for Service Account impersonation

Creating the Workload Identity Pool and Workload Identity Provider defines the _authentication_

@@ -1538,6 +1538,8 @@ To create a diagram by using the Draw.io web application:
1. In the Draw.io web application, select **File** > **Export as** > **SVG**.
1. Select the **Include a copy of my diagram: All pages** checkbox, then select **Export**. Use
   the file extension `drawio.svg` to indicate it can be edited in Draw.io.
1. [Add the SVG to the docs as an image](#add-the-image-link-to-content).
   These SVGs use the same Markdown as other non-SVG images.

##### Use the VS Code extension

@@ -1549,6 +1551,8 @@ To create a diagram by using the Draw.io Integration extension for VS Code:
1. Save the file.

   The diagram's definition is stored in Draw.io-compatible format in the SVG file.
1. [Add the SVG to the docs as an image](#add-the-image-link-to-content).
   These SVGs use the same Markdown as other non-SVG images.

## Emoji

@@ -57,8 +57,9 @@ may require up to 10 minutes before the policy changes take effect.

{{< history >}}

- The deletion protection for security policy projects was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/482967) in GitLab 17.8 [with a flag](../../../administration/feature_flags.md) named `reject_security_policy_project_deletion`. Disabled by default.
- The deletion protection for security policy projects was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/482967) in GitLab 17.8 [with a flag](../../../administration/feature_flags.md) named `reject_security_policy_project_deletion`. Enabled by default.
- The deletion protection for groups that contain security policy projects was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/512043) in GitLab 17.9 [with a flag](../../../administration/feature_flags.md) named `reject_security_policy_project_deletion_groups`. Disabled by default.
- The deletion protection for security policy projects is generally available in GitLab 17.10. Feature flag `reject_security_policy_project_deletion` removed in GitLab 17.10.

{{< /history >}}

@@ -16,6 +16,8 @@ module Gitlab
            coverage retry parallel timeout
            release id_tokens publish pages manual_confirmation run].freeze

        PUBLIC_DIR = 'public'

        validations do
          validates :config, allowed_keys: Gitlab::Ci::Config::Entry::Job.allowed_keys + PROCESSABLE_ALLOWED_KEYS
          validates :config, mutually_exclusive_keys: %i[script run]
@@ -169,7 +171,7 @@ module Gitlab
            retry: retry_defined? ? retry_value : nil,
            parallel: has_parallel? ? parallel_value : nil,
            timeout: parsed_timeout,
            artifacts: artifacts_value,
            artifacts: artifacts_with_pages_publish_path,
            release: release_value,
            after_script: after_script_value,
            hooks: hooks_value,
@@ -201,6 +203,17 @@ module Gitlab
          name == :pages && config[:pages] != false # legacy behavior, overridable with `pages: false`
        end

        def artifacts_with_pages_publish_path
          return artifacts_value unless pages_job?

          artifacts = artifacts_value || {}
          artifacts = artifacts.reverse_merge(paths: [])

          return artifacts if artifacts[:paths].include?(pages_publish_path)

          artifacts.merge(paths: artifacts[:paths] + [pages_publish_path])
        end

        def self.allowed_keys
          ALLOWED_KEYS
        end
@@ -218,6 +231,14 @@ module Gitlab

          allow_failure_value
        end

        def pages_publish_path
          path = config[:publish] || PUBLIC_DIR

          return path unless config[:pages].is_a?(Hash)

          config.dig(:pages, :publish) || path
        end
      end
    end
  end
end

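A standalone restatement of the merge rule in `artifacts_with_pages_publish_path`, using examples that match the spec table added later in this commit; the helper name `merged_paths` is mine, for illustration only.

```ruby
# Re-implements the publish-path merge purely for illustration.
def merged_paths(artifacts, publish_path)
  artifacts = { paths: [] }.merge(artifacts || {}) # reverse_merge equivalent
  return artifacts if artifacts[:paths].include?(publish_path)

  artifacts.merge(paths: artifacts[:paths] + [publish_path])
end

merged_paths(nil, 'public')                   # => { paths: ["public"] }
merged_paths({ paths: ['foo'] }, 'public')    # => { paths: ["foo", "public"] }
merged_paths({ paths: ['bar'] }, 'foo')       # => { paths: ["bar", "foo"] }
merged_paths({ paths: ['public'] }, 'public') # => { paths: ["public"] }
```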
@@ -5,8 +5,8 @@ module Gitlab
  module Database
    module Sos
      TASKS = [
        Sos::DbStatsActivity,
        Sos::PgSchemaDump
        Sos::ArSchemaDump,
        Sos::DbStatsActivity
      ].freeze

      def self.run(output_file)

@@ -3,7 +3,7 @@
module Gitlab
  module Database
    module Sos
      class PgSchemaDump
      class ArSchemaDump
        attr_reader :connection, :name, :output

        def initialize(connection, name, output)

@@ -19,6 +19,8 @@ module Gitlab

      def execute
        if import_file && check_version! && restorers.all?(&:restore) && overwrite_project
          remove_import_file

          project
        else
          raise Projects::ImportService::Error, shared.errors.to_sentence
@@ -32,7 +34,6 @@ module Gitlab
        raise Projects::ImportService::Error, e.message
      ensure
        remove_base_tmp_dir
        remove_import_file
      end

      private

@@ -35347,6 +35347,9 @@ msgstr ""
msgid "MemberRole|Actions"
msgstr ""

msgid "MemberRole|Added from %{role}"
msgstr ""

msgid "MemberRole|Are you sure you want to delete this custom role?"
msgstr ""

@@ -32,6 +32,14 @@ RSpec.describe 'Create issue work item', :js, feature_category: :team_planning d
        expect(page).to have_css('h1', text: 'I am a new issue')
        expect(page).to have_text 'Issue created'
      end

      context 'when using keyboard' do
        it 'supports shortcuts' do
          find('body').native.send_key('l')

          expect(find('.js-labels')).to have_selector('.gl-new-dropdown-panel')
        end
      end
    end
  end
end

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe Gitlab::Ci::Build::Prerequisite::ManagedResource, feature_category: :continuous_integration do
RSpec.describe Gitlab::Ci::Build::Prerequisite::ManagedResource, feature_category: :continuous_delivery do
  describe '#unmet?' do
    let_it_be(:organization) { create(:group) }
    let_it_be(:agent_management_project) { create(:project, :private, :repository, group: organization) }

@@ -790,6 +790,35 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
    end
  end

  describe '#artifacts_with_pages_publish_path', :aggregate_failures, feature_category: :pages do
    where(:name, :config, :result) do
      :pages | {} | { paths: ["public"] }
      :pages | { pages: { publish: 'foo' } } | { paths: ["foo"] }
      :pages | { pages: { publish: 'foo' }, artifacts: { paths: ['foo'] } } | { paths: ["foo"] }
      :pages | { artifacts: { paths: ['foo'] } } | { paths: %w[foo public] }
      :pages | { pages: { publish: 'foo' }, artifacts: { paths: ['bar'] } } | { paths: %w[bar foo] }
      :pages | { pages: { publish: 'public' }, artifacts: { paths: ['public'] } } | { paths: ["public"] }
      :pages | { artifacts: {} } | { paths: ["public"] }
      :pages | { artifacts: { paths: [] } } | { paths: ["public"] }
      :pages | { pages: false } | nil
      :'non-pages' | {} | nil
      :custom | { pages: { publish: 'foo' } } | { paths: ["foo"] }
      :custom | { pages: true, publish: 'foo', artifacts: { paths: ['bar'] } } | { paths: %w[bar foo] }
    end

    before do
      allow_next_instance_of(described_class) do |job|
        allow(job).to receive(:artifacts_value).and_return(config[:artifacts])
      end
    end

    with_them do
      subject { described_class.new(config, name: name).artifacts_with_pages_publish_path }

      it { is_expected.to eq(result) }
    end
  end

  describe '#relevant?' do
    it 'is a relevant entry' do
      entry = described_class.new({ script: 'rspec' }, name: :rspec)

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe Gitlab::Database::Sos::PgSchemaDump, feature_category: :database do
RSpec.describe Gitlab::Database::Sos::ArSchemaDump, feature_category: :database do
  let(:temp_directory) { Dir.mktmpdir }
  let(:output) { Gitlab::Database::Sos::Output.new(temp_directory, mode: :directory) }
  let(:db_name) { 'test_db' }

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe Gitlab::ImportExport::Importer do
RSpec.describe Gitlab::ImportExport::Importer, feature_category: :importers do
  let(:user) { create(:user) }
  let(:test_path) { "#{Dir.tmpdir}/importer_spec" }
  let(:shared) { project.import_export_shared }

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe ImportExportUpload do
RSpec.describe ImportExportUpload, feature_category: :importers do
  let(:project) { create(:project) }

  subject(:import_export_upload) { described_class.new(project: project) }
@@ -35,7 +35,7 @@ RSpec.describe ImportExportUpload do

  describe 'scopes' do
    let_it_be(:upload1) { create(:import_export_upload, export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')) }
    let_it_be(:upload2) { create(:import_export_upload, export_file: nil) }
    let_it_be(:upload2) { create(:import_export_upload, export_file: nil, import_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')) }
    let_it_be(:upload3) { create(:import_export_upload, export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'), updated_at: 25.hours.ago) }
    let_it_be(:upload4) { create(:import_export_upload, export_file: nil, updated_at: 2.days.ago) }

@@ -45,6 +45,12 @@ RSpec.describe ImportExportUpload do
    end
  end

  describe '.with_import_file' do
    it 'returns uploads with import file' do
      expect(described_class.with_import_file).to contain_exactly(upload2)
    end
  end

  describe '.updated_before' do
    it 'returns uploads for a specified date' do
      expect(described_class.updated_before(24.hours.ago)).to contain_exactly(upload3, upload4)

@@ -0,0 +1,35 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Import::ImportFileCleanupService, feature_category: :importers do
  subject(:service) { described_class.new }

  describe '#execute' do
    it 'enqueues a removal job for old import_file' do
      upload = create(
        :import_export_upload,
        updated_at: (described_class::LAST_MODIFIED + 1.hour).ago,
        import_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
      )

      expect(::Gitlab::Import::RemoveImportFileWorker).to receive(:perform_async).with(upload.id)

      service.execute
    end

    context 'when import_file is new' do
      it 'does not enqueue removal job' do
        create(
          :import_export_upload,
          updated_at: (described_class::LAST_MODIFIED - 1.hour).ago,
          import_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
        )

        expect(::Gitlab::Import::RemoveImportFileWorker).not_to receive(:perform_async)

        service.execute
      end
    end
  end
end

@@ -0,0 +1,19 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Import::ImportFileCleanupWorker, feature_category: :importers do
  subject(:worker) { described_class.new }

  describe '#perform' do
    it_behaves_like 'an idempotent worker'

    it 'executes Import::ImportFileCleanupService' do
      expect_next_instance_of(Import::ImportFileCleanupService) do |service|
        expect(service).to receive(:execute)
      end

      worker.perform
    end
  end
end

@@ -0,0 +1,52 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Import::RemoveImportFileWorker, feature_category: :importers do
  let(:upload) do
    create(
      :import_export_upload,
      updated_at: 4.days.ago,
      import_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
    )
  end

  subject(:worker) { described_class.new }

  describe '#perform' do
    before do
      allow_next_instance_of(::Import::Framework::Logger) do |logger|
        allow(logger).to receive(:info)
      end
    end

    it_behaves_like 'an idempotent worker' do
      let(:job_args) { [upload.id] }
    end

    it 'removes import_file of the upload and logs' do
      expect_next_instance_of(::Import::Framework::Logger) do |logger|
        expect(logger)
          .to receive(:info)
          .with(
            message: 'Removed ImportExportUpload import_file',
            project_id: upload.project_id,
            group_id: upload.group_id
          )
      end

      expect { worker.perform(upload.id) }.to change { upload.reload.import_file.file.nil? }.to(true)
    end

    context 'when upload cannot be found' do
      it 'returns' do
        expect(ImportExportUpload).to receive(:find_by_id).with(upload.id).and_return(nil)
        allow(upload).to receive(:remove_import_file!)

        worker.perform(upload.id)

        expect(upload).not_to have_received(:remove_import_file!)
      end
    end
  end
end

@@ -3,18 +3,18 @@
require 'spec_helper'

RSpec.describe RepositoryImportWorker, feature_category: :importers do
  let(:project) { build_stubbed(:project, :import_scheduled, import_state: import_state, import_url: 'url') }
  let(:import_state) { create(:import_state, status: :scheduled) }
  let(:jid) { '12345678' }

  before do
    allow(subject).to receive(:jid).and_return(jid)
    allow(Project).to receive(:find_by_id).with(project.id).and_return(project)
    allow(project).to receive(:after_import)
    allow(import_state).to receive(:start).and_return(true)
  end

  describe '#perform' do
    let(:project) { build_stubbed(:project, :import_scheduled, import_state: import_state, import_url: 'url') }
    let(:import_state) { create(:import_state, status: :scheduled) }
    let(:jid) { '12345678' }

    before do
      allow(subject).to receive(:jid).and_return(jid)
      allow(Project).to receive(:find_by_id).with(project.id).and_return(project)
      allow(project).to receive(:after_import)
      allow(import_state).to receive(:start).and_return(true)
    end

    context 'when project not found (deleted)' do
      before do
        allow(Project).to receive(:find_by_id).with(project.id).and_return(nil)
@@ -88,4 +88,22 @@ RSpec.describe RepositoryImportWorker, feature_category: :importers do
      end
    end
  end

  describe '.sidekiq_interruptions_exhausted' do
    it 'sets import status to failed and removes import_file' do
      user = build_stubbed(:user)
      upload = build_stubbed(:import_export_upload, project: project, user: user)
      job = { 'args' => [project.id] }

      allow(import_state).to receive(:mark_as_failed)
      allow(project).to receive_message_chain(:import_export_uploads, :find_by_user_id).and_return(upload)
      expect(::Gitlab::Import::RemoveImportFileWorker).to receive(:perform_async).with(upload.id)

      described_class.interruptions_exhausted_block.call(job)

      expect(import_state)
        .to have_received(:mark_as_failed)
        .with('Import process reached the maximum number of interruptions')
    end
  end
end