Add latest changes from gitlab-org/gitlab@master
parent 8191b1571c
commit 18f7828977
@@ -20,7 +20,7 @@ code_quality:
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/security-products/codequality:0.85.6"
CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/security-products/codequality:0.85.9"
script:
- |
if ! docker info &>/dev/null; then
@@ -35,6 +35,7 @@ function renderMermaids($els) {
// mermaidAPI options
theme: 'neutral',
flowchart: {
useMaxWidth: true,
htmlLabels: false,
},
securityLevel: 'strict',
@@ -0,0 +1,29 @@
# frozen_string_literal: true

module Projects
class ExportJobFinder
InvalidExportJobStatusError = Class.new(StandardError)
attr_reader :project, :params

def initialize(project, params = {})
@project = project
@params = params
end

def execute
export_jobs = project.export_jobs
export_jobs = by_status(export_jobs)

export_jobs
end

private

def by_status(export_jobs)
return export_jobs unless params[:status]
raise InvalidExportJobStatusError, 'Invalid export job status' unless ProjectExportJob.state_machines[:status].states.map(&:name).include?(params[:status])

export_jobs.with_status(params[:status])
end
end
end
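A minimal usage sketch of the finder above (illustrative only; it assumes a `project` record is already loaded, and the status symbols come from `ProjectExportJob`'s state machine):

# Illustrative usage; `project` is assumed to be an existing Project record.
# An unrecognised status raises Projects::ExportJobFinder::InvalidExportJobStatusError.
started_jobs = Projects::ExportJobFinder.new(project, { status: :started }).execute
started_jobs.present? # => true while an export for the project is running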
@@ -10,6 +10,7 @@ module Ci
include HasRef

InvalidBridgeTypeError = Class.new(StandardError)
InvalidTransitionError = Class.new(StandardError)

belongs_to :project
belongs_to :trigger_request
@@ -186,6 +186,7 @@ class Project < ApplicationRecord

has_one :import_state, autosave: true, class_name: 'ProjectImportState', inverse_of: :project
has_one :import_export_upload, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :export_jobs, class_name: 'ProjectExportJob'
has_one :project_repository, inverse_of: :project
has_one :incident_management_setting, inverse_of: :project, class_name: 'IncidentManagement::ProjectIncidentManagementSetting'
has_one :error_tracking_setting, inverse_of: :project, class_name: 'ErrorTracking::ProjectErrorTrackingSetting'
@@ -1850,10 +1851,12 @@ class Project < ApplicationRecord
end

def export_status
if export_in_progress?
if regeneration_in_progress?
:regeneration_in_progress
elsif export_enqueued?
:queued
elsif export_in_progress?
:started
elsif after_export_in_progress?
:after_export_action
elsif export_file_exists?
:finished
else
@@ -1862,11 +1865,19 @@ class Project < ApplicationRecord
end

def export_in_progress?
import_export_shared.active_export_count > 0
strong_memoize(:export_in_progress) do
::Projects::ExportJobFinder.new(self, { status: :started }).execute.present?
end
end

def after_export_in_progress?
import_export_shared.after_export_in_progress?
def export_enqueued?
strong_memoize(:export_enqueued) do
::Projects::ExportJobFinder.new(self, { status: :queued }).execute.present?
end
end

def regeneration_in_progress?
(export_enqueued? || export_in_progress?) && export_file_exists?
end

def remove_exports
@@ -0,0 +1,26 @@
# frozen_string_literal: true

class ProjectExportJob < ApplicationRecord
belongs_to :project

validates :project, :jid, :status, presence: true

state_machine :status, initial: :queued do
event :start do
transition [:queued] => :started
end

event :finish do
transition [:started] => :finished
end

event :fail_op do
transition [:queued, :started] => :failed
end

state :queued, value: 0
state :started, value: 1
state :finished, value: 2
state :failed, value: 3
end
end
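An illustrative walk through the state machine above, assuming a valid, persisted record (for example one built with the `:project_export_job` factory this commit adds):

# Sketch only; assumes a persisted, valid ProjectExportJob.
job = FactoryBot.create(:project_export_job)
job.queued?  # => true, the initial state (value 0)
job.start    # :queued  -> :started
job.finish   # :started -> :finished
job.fail_op  # => false; :failed is reachable only from :queued or :started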
@@ -52,6 +52,11 @@ module Ci
subject.drop!(:downstream_pipeline_creation_failed)
end
end
rescue StateMachines::InvalidTransition => e
Gitlab::ErrorTracking.track_exception(
Ci::Bridge::InvalidTransitionError.new(e.message),
bridge_id: bridge.id,
downstream_pipeline_id: pipeline.id)
end

def ensure_preconditions!(target_ref)
@@ -5,7 +5,14 @@ module Ci
def execute(pipeline)
return unless pipeline.bridge_triggered?

pipeline.source_bridge.inherit_status_from_downstream!(pipeline)
begin
pipeline.source_bridge.inherit_status_from_downstream!(pipeline)
rescue StateMachines::InvalidTransition => e
Gitlab::ErrorTracking.track_exception(
Ci::Bridge::InvalidTransitionError.new(e.message),
bridge_id: pipeline.source_bridge.id,
downstream_pipeline_id: pipeline.id)
end
end
end
end
@@ -234,6 +234,13 @@
:resource_boundary: :cpu
:weight: 1
:idempotent:
- :name: cronjob:stuck_export_jobs
:feature_category: :importers
:has_external_dependencies:
:urgency: :default
:resource_boundary: :cpu
:weight: 1
:idempotent:
- :name: cronjob:stuck_import_jobs
:feature_category: :importers
:has_external_dependencies:
@@ -0,0 +1,25 @@
# frozen_string_literal: true

module ProjectExportOptions
extend ActiveSupport::Concern

EXPORT_RETRY_COUNT = 3

included do
sidekiq_options retry: EXPORT_RETRY_COUNT, status_expiration: StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION

# We mark the project export as failed once we have exhausted all retries
sidekiq_retries_exhausted do |job|
project = Project.find(job['args'][1])
# rubocop: disable CodeReuse/ActiveRecord
job = project.export_jobs.find_by(jid: job["jid"])
# rubocop: enable CodeReuse/ActiveRecord

if job&.fail_op
Sidekiq.logger.info "Job #{job['jid']} for project #{project.id} has been set to failed state"
else
Sidekiq.logger.error "Failed to set Job #{job['jid']} for project #{project.id} to failed state"
end
end
end
end
@@ -3,17 +3,24 @@
class ProjectExportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
include ProjectExportOptions

sidekiq_options retry: 3
feature_category :importers
worker_resource_boundary :memory

def perform(current_user_id, project_id, after_export_strategy = {}, params = {})
current_user = User.find(current_user_id)
project = Project.find(project_id)
export_job = project.export_jobs.safe_find_or_create_by(jid: self.jid)
after_export = build!(after_export_strategy)

export_job&.start

::Projects::ImportExport::ExportService.new(project, current_user, params).execute(after_export)

export_job&.finish
rescue ActiveRecord::RecordNotFound, Gitlab::ImportExport::AfterExportStrategyBuilder::StrategyNotFoundError => e
logger.error("Failed to export project #{project_id}: #{e.message}")
end

private
@@ -0,0 +1,54 @@
# frozen_string_literal: true

# rubocop:disable Scalability/IdempotentWorker
class StuckExportJobsWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker updates export states inline and does not schedule
# other jobs.
include CronjobQueue
# rubocop:enable Scalability/CronWorkerContext

feature_category :importers
worker_resource_boundary :cpu

EXPORT_JOBS_EXPIRATION = 6.hours.to_i

def perform
failed_jobs_count = mark_stuck_jobs_as_failed!

Gitlab::Metrics.add_event(:stuck_export_jobs,
failed_jobs_count: failed_jobs_count)
end

private

# rubocop: disable CodeReuse/ActiveRecord
def mark_stuck_jobs_as_failed!
jids_and_ids = enqueued_exports.pluck(:jid, :id).to_h

completed_jids = Gitlab::SidekiqStatus.completed_jids(jids_and_ids.keys)
return unless completed_jids.any?

completed_ids = jids_and_ids.values_at(*completed_jids)

# We select the export states again, because they may have transitioned from
# started to finished while we were looking up their Sidekiq status.
completed_jobs = enqueued_exports.where(id: completed_ids)

Sidekiq.logger.info(
message: 'Marked stuck export jobs as failed',
job_ids: completed_jobs.map(&:jid)
)

completed_jobs.each do |job|
job.fail_op
end.count
end
# rubocop: enable CodeReuse/ActiveRecord

def enqueued_exports
ProjectExportJob.with_status([:started, :queued])
end
end
# rubocop:enable Scalability/IdempotentWorker
@@ -0,0 +1,5 @@
---
title: Fix quick actions executing in multiline inline code when placed on its own line
merge_request: 24933
author: Pavlo Dudchenko
type: fixed
@@ -0,0 +1,5 @@
---
title: Update GitLab's codeclimate to 0.85.9
merge_request: 26712
author: Eddie Stubbington
type: other
@@ -0,0 +1,5 @@
---
title: Fix logic to determine project export state and add regeneration_in_progress state
merge_request: 23664
author:
type: fixed
@@ -0,0 +1,5 @@
---
title: Fix Mermaid flowchart width
merge_request: 26848
author: julien MILLAU
type: fixed
@@ -453,6 +453,9 @@ Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['job_class'] = 'Rem
Settings.cron_jobs['stuck_import_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_import_jobs_worker']['cron'] ||= '15 * * * *'
Settings.cron_jobs['stuck_import_jobs_worker']['job_class'] = 'StuckImportJobsWorker'
Settings.cron_jobs['stuck_export_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_export_jobs_worker']['cron'] ||= '30 * * * *'
Settings.cron_jobs['stuck_export_jobs_worker']['job_class'] = 'StuckExportJobsWorker'
Settings.cron_jobs['gitlab_usage_ping_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['gitlab_usage_ping_worker']['cron'] ||= nil # This is dynamically loaded in the sidekiq initializer
Settings.cron_jobs['gitlab_usage_ping_worker']['job_class'] = 'GitlabUsagePingWorker'
@@ -0,0 +1,19 @@
# frozen_string_literal: true

class CreateProjectExportJobs < ActiveRecord::Migration[6.0]
DOWNTIME = false

def change
create_table :project_export_jobs do |t|
t.references :project, index: false, null: false, foreign_key: { on_delete: :cascade }
t.timestamps_with_timezone null: false
t.integer :status, limit: 2, null: false, default: 0
t.string :jid, limit: 100, null: false, unique: true

t.index [:project_id, :jid]
t.index [:jid], unique: true
t.index [:status]
t.index [:project_id, :status]
end
end
end
db/schema.rb
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 2020_03_10_135823) do
ActiveRecord::Schema.define(version: 2020_03_11_165635) do

# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
@@ -3242,6 +3242,18 @@ ActiveRecord::Schema.define(version: 2020_03_10_135823) do
t.string "organization_name"
end

create_table "project_export_jobs", force: :cascade do |t|
t.bigint "project_id", null: false
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.integer "status", limit: 2, default: 0, null: false
t.string "jid", limit: 100, null: false
t.index ["jid"], name: "index_project_export_jobs_on_jid", unique: true
t.index ["project_id", "jid"], name: "index_project_export_jobs_on_project_id_and_jid"
t.index ["project_id", "status"], name: "index_project_export_jobs_on_project_id_and_status"
t.index ["status"], name: "index_project_export_jobs_on_status"
end

create_table "project_feature_usages", primary_key: "project_id", id: :integer, default: nil, force: :cascade do |t|
t.datetime "jira_dvcs_cloud_last_sync_at"
t.datetime "jira_dvcs_server_last_sync_at"
@@ -5017,6 +5029,7 @@ ActiveRecord::Schema.define(version: 2020_03_10_135823) do
add_foreign_key "project_deploy_tokens", "deploy_tokens", on_delete: :cascade
add_foreign_key "project_deploy_tokens", "projects", on_delete: :cascade
add_foreign_key "project_error_tracking_settings", "projects", on_delete: :cascade
add_foreign_key "project_export_jobs", "projects", on_delete: :cascade
add_foreign_key "project_feature_usages", "projects", on_delete: :cascade
add_foreign_key "project_features", "projects", name: "fk_18513d9b92", on_delete: :cascade
add_foreign_key "project_group_links", "projects", name: "fk_daa8cee94c", on_delete: :cascade
@@ -122,12 +122,12 @@ our AsciiDoc snippets, wikis and repos using delimited blocks:

- **Markdown**

~~~markdown
````markdown
```plantuml
Bob -> Alice : hello
Alice -> Bob : hi
```
~~~
````

- **AsciiDoc**

@@ -4987,6 +4987,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "descendantWeightSum",
"description": "Total weight of open and closed issues in the epic and its descendants. Available only when feature flag `unfiltered_epic_aggregates` is enabled.",
"args": [

],
"type": {
"kind": "OBJECT",
"name": "EpicDescendantWeights",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "description",
"description": "Description of the epic",
@@ -9737,6 +9751,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "healthStatus",
"description": "Current health status. Available only when feature flag `save_issuable_health_status` is enabled.",
"args": [

],
"type": {
"kind": "ENUM",
"name": "HealthStatus",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
"description": "Global ID of the epic-issue relation",
@@ -11117,6 +11145,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "healthStatus",
"description": "Current health status. Available only when feature flag `save_issuable_health_status` is enabled.",
"args": [

],
"type": {
"kind": "ENUM",
"name": "HealthStatus",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "iid",
"description": "Internal ID of the issue",
@@ -13098,6 +13140,47 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "EpicDescendantWeights",
"description": "Total weight of open and closed descendant issues",
"fields": [
{
"name": "closedIssues",
"description": "Total weight of completed (closed) issues in this epic, including epic descendants",
"args": [

],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "openedIssues",
"description": "Total weight of opened issues in this epic, including epic descendants",
"args": [

],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [

],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "EpicHealthStatus",
@@ -61,14 +61,20 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Status can be one of:

- `none`
- `queued`
- `started`
- `after_export_action`
- `finished`
- `regeneration_in_progress`

The `after_export_action` state represents that the export process has been completed successfully and
the platform is performing some actions on the resulted file. For example, sending
an email notifying the user to download the file, uploading the exported file
to a web server, etc.
`queued` state represents the request for export is received, and is currently in the queue to be processed.

The `started` state represents that the export process has started and is currently in progress.
It includes the process of exporting, actions performed on the resultant file such as sending
an email notifying the user to download the file, uploading the exported file to a web server, etc.

`finished` state is after the export process has completed and the user has been notified.

`regeneration_in_progress` is when an export file is available to download, and a request to generate a new export is in process.

`_links` are only present when export has finished.
@@ -19,7 +19,26 @@ to GitLab!
If you have questions that are not answered here, the [GitLab community forum](https://forum.gitlab.com/)
can be a great resource.

## Important differences
## Managing the organizational transition

An important part of transitioning from Jenkins to GitLab is the cultural and organizational
changes that comes with the move, and successfully managing them. There are a few
things we have found that helps this:

- Setting and communicating a clear vision of what your migration goals are helps
your users understand why the effort is worth it. The value will be clear when
the work is done, but people need to be aware while it's in progress too.
- Sponsorship and alignment from the relevant leadership team helps with the point above.
- Spending time educating your users on what's different, sharing this document with them,
and so on will help ensure you are successful.
- Finding ways to sequence or delay parts of the migration can help a lot, but you
don't want to leave things in a non-migrated (or partially-migrated) state for too
long. To gain all the benefits of GitLab, moving your existing Jenkins setup over
as-is, including any current problems, will not be enough. You need to take advantage
of the improvements that GitLab offers, and this requires (eventually) updating
your implementation as part of the transition.

## Important product differences

There are some high level differences between the products worth mentioning:

@@ -56,7 +56,7 @@ All labels, their meaning and priority are defined on the
[labels page](https://gitlab.com/gitlab-org/gitlab/-/labels).

If you come across an issue that has none of these, and you're allowed to set
labels, you can _always_ add the team and type, and often also the subject.
labels, you can _always_ add the type, stage, group, and often the category/feature labels.

### Type labels

@@ -75,7 +75,7 @@ A number of type labels have a priority assigned to them, which automatically
makes them float to the top, depending on their importance.

Type labels are always lowercase, and can have any color, besides blue (which is
already reserved for subject labels).
already reserved for category labels).

The descriptions on the [labels page](https://gitlab.com/groups/gitlab-org/-/labels)
explain what falls under each type label.
@@ -476,7 +476,7 @@ as the list item. This can be done with:
Items nested in lists should always align with the first character of the list item.
In unordered lists (using `-`), this means two spaces for each level of indentation:

~~~md
````markdown
- Unordered list item 1

A line nested using 2 spaces to align with the `U` above.
@@ -495,11 +495,11 @@ In unordered lists (using `-`), this means two spaces for each level of indentat
- Unordered list item 4


~~~
````

For ordered lists, use three spaces for each level of indentation:

~~~md
````markdown
1. Ordered list item 1

A line nested using 3 spaces to align with the `O` above.
@@ -518,7 +518,7 @@ For ordered lists, use three spaces for each level of indentation:
1. Ordered list item 4


~~~
````

You can nest full lists inside other lists using the same rules as above. If you wish
to mix types, that is also possible, as long as you don't mix items at the same level:
@@ -1364,7 +1364,7 @@ on this document. Further explanation is given below.

The following can be used as a template to get started:

~~~md
````markdown
## Descriptive title

One or two sentence description of what endpoint does.
@@ -1392,7 +1392,7 @@ Example response:
}
]
```
~~~
````

### Fake tokens

Binary file added (image, 15 KiB)
Binary file added (image, 16 KiB)
@@ -198,6 +198,35 @@ An approval is optional when a license report:
- Contains no software license violations.
- Contains only new licenses that are `approved` or unknown.

## Outdated security reports

> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/4913) in GitLab 12.7.

When a security report generated for a merge request becomes outdated, the merge request shows a warning
message in the security widget and prompts you to take an appropriate action.

This can happen in two scenarios:

1. Your [source branch is behind the target branch](#source-branch-is-behind-the-target-branch).
1. The [target branch security report is out of date](#target-branch-security-report-is-out-of-date).

### Source branch is behind the target branch

This means the most recent common ancestor commit between the target branch and the source branch is
not the most recent commit on the target branch. This is by far the most common situation.

In this case you must rebase or merge to incorporate the changes from the target branch.



### Target branch security report is out of date

This can happen for many reasons, including failed jobs or new advisories. When the merge request shows that a
security report is out of date, you must run a new pipeline on the target branch.
You can do it quickly by following the hyperlink given to run a new pipeline.



## Troubleshooting

### Getting error message `sast job: stage parameter should be [some stage name here]`
@@ -282,11 +282,11 @@ source - a listing that is embellished with (colorized) syntax highlighting
----
```

~~~asciidoc
````asciidoc
\```language
fenced code - a shorthand syntax for the source block
\```
~~~
````

```asciidoc
[,attribution,citetitle]
@@ -165,7 +165,7 @@ Visit the [official page](https://mermaidjs.github.io/) for more details. If you

In order to generate a diagram or flowchart, you should write your text inside the `mermaid` block:

~~~
````markdown
```mermaid
graph TD;
A-->B;
@@ -173,7 +173,7 @@ graph TD;
B-->D;
C-->D;
```
~~~
````

```mermaid
graph TD;
@@ -185,7 +185,7 @@ graph TD;

Subgraphs can also be included:

~~~
````markdown
```mermaid
graph TB

@@ -202,7 +202,7 @@ graph TB
SubGraph1 --> FinalThing[Final Thing]
end
```
~~~
````

```mermaid
graph TB
@@ -280,27 +280,27 @@ The following delimiters are supported:

- YAML (`---`):

~~~yaml
```yaml
---
title: About Front Matter
example:
language: yaml
---
~~~
```

- TOML (`+++`):

~~~toml
```toml
+++
title = "About Front Matter"
[example]
language = "toml"
+++
~~~
```

- JSON (`;;;`):

~~~json
```json
;;;
{
"title": "About Front Matter"
@@ -309,7 +309,7 @@ The following delimiters are supported:
}
}
;;;
~~~
```

Other languages are supported by adding a specifier to any of the existing
delimiters. For example:
@@ -364,7 +364,7 @@ Math written between dollar signs `$` will be rendered inline with the text. Mat
inside a [code block](#code-spans-and-blocks) with the language declared as `math`, will be rendered
on a separate line:

~~~
````markdown
This math is inline $`a^2+b^2=c^2`$.

This is on a separate line
@@ -372,7 +372,7 @@ This is on a separate line
```math
a^2+b^2=c^2
```
~~~
````

This math is inline $`a^2+b^2=c^2`$.

@@ -613,12 +613,12 @@ Inline `code` has `back-ticks around` it.

---

Similarly, a whole block of code can be fenced with triple backticks ```` ``` ````,
Similarly, a whole block of code can be fenced with triple backticks (```` ``` ````),
triple tildes (`~~~`), or indented 4 or more spaces to achieve a similar effect for
a larger body of code.

~~~
```
````markdown
```python
def function():
#indenting works just fine in the fenced code block
s = "Python code"
@@ -628,7 +628,7 @@ def function():
Using 4 spaces
is like using
3-backtick fences.
~~~
````

```plaintext
~~~
@@ -651,9 +651,9 @@ is like using
3-backtick fences.
```

~~~plaintext
```plaintext
Tildes are OK too.
~~~
```

#### Colored code and syntax highlighting

@@ -665,10 +665,10 @@ highlighting in code blocks. For a list of supported languages visit the
Syntax highlighting is only supported in code blocks, it is not possible to highlight
code when it is inline.

Blocks of code are fenced by lines with three back-ticks ```` ``` ```` or three tildes `~~~`, and have
Blocks of code are fenced by lines with three back-ticks (```` ``` ````) or three tildes (`~~~`), and have
the language identified at the end of the first fence:

~~~markdown
````markdown
```javascript
var s = "JavaScript syntax highlighting";
alert(s);
@@ -692,7 +692,7 @@ No language indicated, so no syntax highlighting.
s = "There is no highlighting for this."
But let's throw in a <b>tag</b>.
```
~~~
````

The four examples above render as:

@@ -7,7 +7,7 @@ code_quality:
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/security-products/codequality:0.85.6"
CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/security-products/codequality:0.85.9"
script:
- |
if ! docker info &>/dev/null; then
@@ -105,6 +105,17 @@ module Gitlab
.+?
\n```$
)
|
(?<inline_code>
# Inline code on separate rows:
# `
# Anything, including `/cmd arg` which are ignored by this filter
# `

^.*`\n*
.+?
\n*`$
)
|
(?<html>
# HTML block:
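To see what the new `<inline_code>` alternative is meant to catch, here is a simplified, standalone sketch of that branch (not the extractor's full pattern):

# Simplified stand-in for the <inline_code> branch: a backtick-delimited span
# that may stretch over several rows, so quick actions inside it are ignored.
inline_code = /^.*`\n*.+?\n*`$/m
text = "`\nThis is some text\n/close\n/assign @user\n`"
inline_code.match?(text) # => true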
@@ -8,13 +8,14 @@ namespace :gitlab do
OUTPUT_DIR = Rails.root.join("doc/api/graphql/reference")
TEMPLATES_DIR = 'lib/gitlab/graphql/docs/templates/'

# Consider all feature flags disabled
# to avoid pipeline failures in case developer
# dumps schema with flags enabled locally before pushing
task disable_feature_flags: :environment do
# Make all feature flags enabled so that all feature flag
# controlled fields are considered visible and are output.
# Also avoids pipeline failures in case developer
# dumps schema with flags disabled locally before pushing
task enable_feature_flags: :environment do
class Feature
def self.enabled?(*args)
false
true
end
end
end
@@ -25,7 +26,7 @@ namespace :gitlab do
# - gitlab:graphql:schema:json
GraphQL::RakeTask.new(
schema_name: 'GitlabSchema',
dependencies: [:environment, :disable_feature_flags],
dependencies: [:environment, :enable_feature_flags],
directory: OUTPUT_DIR,
idl_outfile: "gitlab_schema.graphql",
json_outfile: "gitlab_schema.json"
@@ -33,7 +34,7 @@ namespace :gitlab do

namespace :graphql do
desc 'GitLab | GraphQL | Generate GraphQL docs'
task compile_docs: :environment do
task compile_docs: [:environment, :enable_feature_flags] do
renderer = Gitlab::Graphql::Docs::Renderer.new(GitlabSchema.graphql_definition, render_options)

renderer.write
@@ -42,7 +43,7 @@ namespace :gitlab do
end

desc 'GitLab | GraphQL | Check if GraphQL docs are up to date'
task check_docs: :environment do
task check_docs: [:environment, :enable_feature_flags] do
renderer = Gitlab::Graphql::Docs::Renderer.new(GitlabSchema.graphql_definition, render_options)

doc = File.read(Rails.root.join(OUTPUT_DIR, 'index.md'))
@@ -56,7 +57,7 @@ namespace :gitlab do
end

desc 'GitLab | GraphQL | Check if GraphQL schemas are up to date'
task check_schema: :environment do
task check_schema: [:environment, :enable_feature_flags] do
idl_doc = File.read(Rails.root.join(OUTPUT_DIR, 'gitlab_schema.graphql'))
json_doc = File.read(Rails.root.join(OUTPUT_DIR, 'gitlab_schema.json'))

@@ -7015,7 +7015,7 @@ msgstr ""
msgid "Display source"
msgstr ""

msgid "Displays dependencies and known vulnerabilities, based on the %{linkStart}latest pipeline%{linkEnd} scan"
msgid "Displays dependencies and known vulnerabilities, based on the %{linkStart}latest successful%{linkEnd} scan"
msgstr ""

msgid "Do not display offers from third parties within GitLab"
@@ -11724,7 +11724,7 @@ msgstr ""
msgid "Licenses|Detected in Project"
msgstr ""

msgid "Licenses|Displays licenses detected in the project, based on the %{linkStart}latest pipeline%{linkEnd} scan"
msgid "Licenses|Displays licenses detected in the project, based on the %{linkStart}latest successful%{linkEnd} scan"
msgstr ""

msgid "Licenses|Error fetching the license list. Please check your network connection and try again."
@@ -50,7 +50,7 @@
"apollo-link-batch-http": "^1.2.11",
"apollo-upload-client": "^10.0.0",
"autosize": "^4.0.2",
"aws-sdk": "^2.526.0",
"aws-sdk": "^2.637.0",
"axios": "^0.19.0",
"babel-loader": "^8.0.6",
"babel-plugin-lodash": "^3.3.4",
@@ -1140,7 +1140,7 @@ describe ProjectsController do
end

it 'prevents requesting project export' do
get action, params: { namespace_id: project.namespace, id: project }
post action, params: { namespace_id: project.namespace, id: project }

expect(flash[:alert]).to eq('This endpoint has been requested too many times. Try again later.')
expect(response).to have_gitlab_http_status(:found)
@@ -1152,7 +1152,7 @@ describe ProjectsController do

context 'when project export is enabled' do
it 'returns 302' do
get action, params: { namespace_id: project.namespace, id: project }
post action, params: { namespace_id: project.namespace, id: project }

expect(response).to have_gitlab_http_status(:found)
end
@@ -1164,7 +1164,7 @@ describe ProjectsController do
end

it 'returns 404' do
get action, params: { namespace_id: project.namespace, id: project }
post action, params: { namespace_id: project.namespace, id: project }

expect(response).to have_gitlab_http_status(:not_found)
end
@@ -0,0 +1,8 @@
# frozen_string_literal: true

FactoryBot.define do
factory :project_export_job do
project
jid { SecureRandom.hex(8) }
end
end
@@ -94,8 +94,31 @@ describe 'Mermaid rendering', :js do
page.find('summary').click
svg = page.find('svg.mermaid')

expect(svg[:width].to_i).to be_within(5).of(120)
expect(svg[:height].to_i).to be_within(5).of(220)
expect(svg[:style]).to match(/max-width/)
expect(svg[:width].to_i).to eq(100)
expect(svg[:height].to_i).to eq(0)
end
end

it 'correctly sizes mermaid diagram block', :js do
description = <<~MERMAID
```mermaid
graph TD;
A-->B;
A-->C;
B-->D;
C-->D;
```
MERMAID

project = create(:project, :public)
issue = create(:issue, project: project, description: description)

visit project_issue_path(project, issue)

svg = page.find('svg.mermaid')
expect(svg[:style]).to match(/max-width/)
expect(svg[:width].to_i).to eq(100)
expect(svg[:height].to_i).to eq(0)
end
end
@@ -0,0 +1,51 @@
# frozen_string_literal: true

require 'spec_helper'

describe Projects::ExportJobFinder do
let(:project) { create(:project) }
let(:project_export_job1) { create(:project_export_job, project: project) }
let(:project_export_job2) { create(:project_export_job, project: project) }

describe '#execute' do
subject { described_class.new(project, params).execute }

context 'when queried for a project' do
let(:params) { {} }

it 'scopes to the project' do
expect(subject).to contain_exactly(
project_export_job1, project_export_job2
)
end
end

context 'when queried by job id' do
let(:params) { { jid: project_export_job1.jid } }

it 'filters records' do
expect(subject).to contain_exactly(project_export_job1)
end
end

context 'when queried by status' do
let(:params) { { status: :started } }

before do
project_export_job2.start!
end

it 'filters records' do
expect(subject).to contain_exactly(project_export_job2)
end
end

context 'when queried by invalid status' do
let(:params) { { status: '1234ad' } }

it 'raises exception' do
expect { subject }.to raise_error(described_class::InvalidExportJobStatusError, 'Invalid export job status')
end
end
end
end
@@ -13,9 +13,10 @@
"type": "string",
"enum": [
"none",
"queued",
"started",
"finished",
"after_export_action"
"regeneration_in_progress"
]
}
}
@@ -0,0 +1,50 @@
import $ from 'jquery';
import BlobFileDropzone from '~/blob/blob_file_dropzone';

describe('BlobFileDropzone', () => {
preloadFixtures('blob/show.html');
let dropzone;
let replaceFileButton;
const jQueryMock = {
enable: jest.fn(),
disable: jest.fn(),
};

beforeEach(() => {
loadFixtures('blob/show.html');
const form = $('.js-upload-blob-form');
// eslint-disable-next-line no-new
new BlobFileDropzone(form, 'POST');
dropzone = $('.js-upload-blob-form .dropzone').get(0).dropzone;
dropzone.processQueue = jest.fn();
replaceFileButton = $('#submit-all');
$.fn.extend(jQueryMock);
});

describe('submit button', () => {
it('requires file', () => {
jest.spyOn(window, 'alert').mockImplementation(() => {});

replaceFileButton.click();

expect(window.alert).toHaveBeenCalled();
});

it('is disabled while uploading', () => {
jest.spyOn(window, 'alert').mockImplementation(() => {});

const file = new File([], 'some-file.jpg');
const fakeEvent = $.Event('drop', {
dataTransfer: { files: [file] },
});

dropzone.listeners[0].events.drop(fakeEvent);

replaceFileButton.click();

expect(window.alert).not.toHaveBeenCalled();
expect(jQueryMock.enable).toHaveBeenCalled();
expect(dropzone.processQueue).toHaveBeenCalled();
});
});
});
@@ -1,39 +0,0 @@
import $ from 'jquery';
import BlobFileDropzone from '~/blob/blob_file_dropzone';

describe('BlobFileDropzone', function() {
preloadFixtures('blob/show.html');

beforeEach(() => {
loadFixtures('blob/show.html');
const form = $('.js-upload-blob-form');
this.blobFileDropzone = new BlobFileDropzone(form, 'POST');
this.dropzone = $('.js-upload-blob-form .dropzone').get(0).dropzone;
this.replaceFileButton = $('#submit-all');
});

describe('submit button', () => {
it('requires file', () => {
spyOn(window, 'alert');

this.replaceFileButton.click();

expect(window.alert).toHaveBeenCalled();
});

it('is disabled while uploading', () => {
spyOn(window, 'alert');

const file = new File([], 'some-file.jpg');
const fakeEvent = $.Event('drop', {
dataTransfer: { files: [file] },
});

this.dropzone.listeners[0].events.drop(fakeEvent);
this.replaceFileButton.click();

expect(window.alert).not.toHaveBeenCalled();
expect(this.replaceFileButton.is(':disabled')).toEqual(true);
});
});
});
@@ -3,6 +3,12 @@
require 'spec_helper'

describe Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy do
before do
allow_next_instance_of(ProjectExportWorker) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
end

let!(:service) { described_class.new }
let!(:project) { create(:project, :with_export) }
let(:shared) { project.import_export_shared }
@@ -5,6 +5,12 @@ require 'spec_helper'
describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
include StubRequests

before do
allow_next_instance_of(ProjectExportWorker) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
end

let(:example_url) { 'http://www.example.com' }
let(:strategy) { subject.new(url: example_url, http_method: 'post') }
let!(:project) { create(:project, :with_export) }
@@ -469,6 +469,7 @@ project:
- autoclose_referenced_issues
- status_page_setting
- requirements
- export_jobs
award_emoji:
- awardable
- user
@@ -291,6 +291,33 @@ describe Gitlab::QuickActions::Extractor do
expect(msg).to eq expected
end

it 'does not extract commands in multiline inline code on seperated rows' do
msg = "Hello\r\n`\r\nThis is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
expected = msg.delete("\r")
msg, commands = extractor.extract_commands(msg)

expect(commands).to be_empty
expect(msg).to eq expected
end

it 'does not extract commands in multiline inline code starting from text' do
msg = "Hello `This is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
expected = msg.delete("\r")
msg, commands = extractor.extract_commands(msg)

expect(commands).to be_empty
expect(msg).to eq expected
end

it 'does not extract commands in inline code' do
msg = "`This is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
expected = msg.delete("\r")
msg, commands = extractor.extract_commands(msg)

expect(commands).to be_empty
expect(msg).to eq expected
end

it 'limits to passed commands when they are passed' do
msg = <<~MSG.strip
Hello, we should only extract the commands passed
@@ -0,0 +1,19 @@
# frozen_string_literal: true

require 'spec_helper'

describe ProjectExportJob, type: :model do
let(:project) { create(:project) }
let!(:job1) { create(:project_export_job, project: project, status: 0) }
let!(:job2) { create(:project_export_job, project: project, status: 2) }

describe 'associations' do
it { expect(job1).to belong_to(:project) }
end

describe 'validations' do
it { expect(job1).to validate_presence_of(:project) }
it { expect(job1).to validate_presence_of(:jid) }
it { expect(job1).to validate_presence_of(:status) }
end
end
@@ -3957,6 +3957,12 @@ describe Project do
describe '#remove_export' do
let(:project) { create(:project, :with_export) }

before do
allow_next_instance_of(ProjectExportWorker) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
end

it 'removes the export' do
project.remove_exports

@@ -5813,6 +5819,86 @@ describe Project do
end
end

describe '#add_export_job' do
context 'if not already present' do
it 'starts project export job' do
user = create(:user)
project = build(:project)

expect(ProjectExportWorker).to receive(:perform_async).with(user.id, project.id, nil, {})

project.add_export_job(current_user: user)
end
end
end

describe '#export_in_progress?' do
let(:project) { build(:project) }
let!(:project_export_job ) { create(:project_export_job, project: project) }

context 'when project export is enqueued' do
it { expect(project.export_in_progress?).to be false }
end

context 'when project export is in progress' do
before do
project_export_job.start!
end

it { expect(project.export_in_progress?).to be true }
end

context 'when project export is completed' do
before do
finish_job(project_export_job)
end

it { expect(project.export_in_progress?).to be false }
end
end

describe '#export_status' do
let(:project) { build(:project) }
let!(:project_export_job ) { create(:project_export_job, project: project) }

context 'when project export is enqueued' do
it { expect(project.export_status).to eq :queued }
end

context 'when project export is in progress' do
before do
project_export_job.start!
end

it { expect(project.export_status).to eq :started }
end

context 'when project export is completed' do
before do
finish_job(project_export_job)
allow(project).to receive(:export_file).and_return(double(ImportExportUploader, file: 'exists.zip'))
end

it { expect(project.export_status).to eq :finished }
end

context 'when project export is being regenerated' do
let!(:new_project_export_job ) { create(:project_export_job, project: project) }

before do
finish_job(project_export_job)
allow(project).to receive(:export_file).and_return(double(ImportExportUploader, file: 'exists.zip'))
end

it { expect(project.export_status).to eq :regeneration_in_progress }
end
end

def finish_job(export_job)
export_job.start
export_job.finish
end

def rugged_config
rugged_repo(project.repository).config
end
@@ -27,12 +27,9 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do

before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)

# simulate exporting work directory
FileUtils.mkdir_p File.join(project_started.export_path, 'securerandom-hex')

# simulate in after export action
FileUtils.touch File.join(project_after_export.import_export_shared.lock_files_path, SecureRandom.hex)
allow_next_instance_of(ProjectExportWorker) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
end

after do
@@ -82,28 +79,42 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
expect(json_response['export_status']).to eq('none')
end

it 'is started' do
get api(path_started, user)
context 'when project export has started' do
before do
create(:project_export_job, project: project_started, status: 1)
end

expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/project/export_status')
expect(json_response['export_status']).to eq('started')
it 'returns status started' do
get api(path_started, user)

expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/project/export_status')
expect(json_response['export_status']).to eq('started')
end
end

it 'is after_export' do
get api(path_after_export, user)
context 'when project export has finished' do
it 'returns status finished' do
get api(path_finished, user)

expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/project/export_status')
expect(json_response['export_status']).to eq('after_export_action')
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/project/export_status')
expect(json_response['export_status']).to eq('finished')
end
end

it 'is finished' do
get api(path_finished, user)
context 'when project export is being regenerated' do
before do
create(:project_export_job, project: project_finished, status: 1)
end

expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/project/export_status')
expect(json_response['export_status']).to eq('finished')
it 'returns status regeneration_in_progress' do
get api(path_finished, user)

expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/project/export_status')
expect(json_response['export_status']).to eq('regeneration_in_progress')
end
end
end

@@ -362,6 +362,26 @@ describe Ci::CreateCrossProjectPipelineService, '#execute' do
end
end

context 'when bridge job status update raises state machine errors' do
let(:stub_config) { false }

before do
stub_ci_pipeline_yaml_file(YAML.dump(invalid: { yaml: 'error' }))
bridge.drop!
end

it 'tracks the exception' do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(
instance_of(Ci::Bridge::InvalidTransitionError),
bridge_id: bridge.id,
downstream_pipeline_id: kind_of(Numeric))

service.execute(bridge)
end
end

context 'when bridge job has YAML variables defined' do
before do
bridge.yaml_variables = [{ key: 'BRIDGE', value: 'var', public: true }]
@@ -22,6 +22,24 @@ describe Ci::PipelineBridgeStatusService do

subject
end

context 'when bridge job status raises state machine errors' do
before do
pipeline.drop!
bridge.drop!
end

it 'tracks the exception' do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(
instance_of(Ci::Bridge::InvalidTransitionError),
bridge_id: bridge.id,
downstream_pipeline_id: pipeline.id)

subject
end
end
end
end
end
@@ -0,0 +1,41 @@
# frozen_string_literal: true

require 'spec_helper'

describe ProjectExportOptions do
let(:project) { create(:project) }
let(:project_export_job) { create(:project_export_job, project: project, jid: '123', status: 1) }
let(:job) { { 'args' => [project.owner.id, project.id, nil, nil], 'jid' => '123' } }
let(:worker_class) do
Class.new do
include Sidekiq::Worker
include ProjectExportOptions
end
end

it 'sets default retry limit' do
expect(worker_class.sidekiq_options['retry']).to eq(ProjectExportOptions::EXPORT_RETRY_COUNT)
end

it 'sets default status expiration' do
expect(worker_class.sidekiq_options['status_expiration']).to eq(StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION)
end

describe '.sidekiq_retries_exhausted' do
it 'marks status as failed' do
expect { worker_class.sidekiq_retries_exhausted_block.call(job) }.to change { project_export_job.reload.status }.from(1).to(3)
end

context 'when status update fails' do
before do
project_export_job.update(status: 2)
end

it 'logs an error' do
expect(Sidekiq.logger).to receive(:error).with("Failed to set Job #{job['jid']} for project #{project.id} to failed state")

worker_class.sidekiq_retries_exhausted_block.call(job)
end
end
end
end
@@ -9,21 +9,59 @@ describe ProjectExportWorker do
subject { described_class.new }

describe '#perform' do
before do
allow_next_instance_of(described_class) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
end

context 'when it succeeds' do
it 'calls the ExportService' do
expect_any_instance_of(::Projects::ImportExport::ExportService).to receive(:execute)

subject.perform(user.id, project.id, { 'klass' => 'Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy' })
end

context 'export job' do
before do
allow_any_instance_of(::Projects::ImportExport::ExportService).to receive(:execute)
end

it 'creates an export job record for the project' do
expect { subject.perform(user.id, project.id, {}) }.to change { project.export_jobs.count }.from(0).to(1)
end

it 'sets the export job status to started' do
expect_next_instance_of(ProjectExportJob) do |job|
expect(job).to receive(:start)
end

subject.perform(user.id, project.id, {})
end

it 'sets the export job status to finished' do
expect_next_instance_of(ProjectExportJob) do |job|
expect(job).to receive(:finish)
end

subject.perform(user.id, project.id, {})
end
end
end

context 'when it fails' do
it 'raises an exception when params are invalid' do
it 'does not raise an exception when strategy is invalid' do
expect_any_instance_of(::Projects::ImportExport::ExportService).not_to receive(:execute)

expect { subject.perform(1234, project.id, {}) }.to raise_exception(ActiveRecord::RecordNotFound)
expect { subject.perform(user.id, 1234, {}) }.to raise_exception(ActiveRecord::RecordNotFound)
expect { subject.perform(user.id, project.id, { 'klass' => 'Whatever' }) }.to raise_exception(Gitlab::ImportExport::AfterExportStrategyBuilder::StrategyNotFoundError)
expect { subject.perform(user.id, project.id, { 'klass' => 'Whatever' }) }.not_to raise_error
end

it 'does not raise error when project cannot be found' do
expect { subject.perform(user.id, -234, {}) }.not_to raise_error
end

it 'does not raise error when user cannot be found' do
expect { subject.perform(-863, project.id, {}) }.not_to raise_error
end
end
end
@@ -0,0 +1,75 @@
# frozen_string_literal: true

require 'spec_helper'

describe StuckExportJobsWorker do
let(:worker) { described_class.new }

shared_examples 'project export job detection' do
context 'when the job has completed' do
context 'when the export status was already updated' do
before do
allow(Gitlab::SidekiqStatus).to receive(:completed_jids) do
project_export_job.start
project_export_job.finish

[project_export_job.jid]
end
end

it 'does not mark the export as failed' do
worker.perform

expect(project_export_job.reload.finished?).to be true
end
end

context 'when the export status was not updated' do
before do
allow(Gitlab::SidekiqStatus).to receive(:completed_jids) do
project_export_job.start

[project_export_job.jid]
end
end

it 'marks the project as failed' do
worker.perform

expect(project_export_job.reload.failed?).to be true
end
end

context 'when the job is not in queue and db record in queued state' do
before do
allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([project_export_job.jid])
end

it 'marks the project as failed' do
expect(project_export_job.queued?).to be true

worker.perform

expect(project_export_job.reload.failed?).to be true
end
end
end

context 'when the job is running in Sidekiq' do
before do
allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([])
end

it 'does not mark the project export as failed' do
expect { worker.perform }.not_to change { project_export_job.reload.status }
end
end
end

describe 'with started export status' do
it_behaves_like 'project export job detection' do
let(:project) { create(:project) }
let!(:project_export_job) { create(:project_export_job, project: project, jid: '123') }
end
end
end
yarn.lock
@@ -1866,14 +1866,14 @@ autosize@^4.0.2:
resolved "https://registry.yarnpkg.com/autosize/-/autosize-4.0.2.tgz#073cfd07c8bf45da4b9fd153437f5bafbba1e4c9"
integrity sha512-jnSyH2d+qdfPGpWlcuhGiHmqBJ6g3X+8T+iRwFrHPLVcdoGJE/x6Qicm6aDHfTsbgZKxyV8UU/YB2p4cjKDRRA==

aws-sdk@^2.526.0:
version "2.526.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.526.0.tgz#e0f899be59edb7d50eb8cca7978bcd401a5d48c2"
integrity sha512-ZZqf8AnD9A8ZJd/4oU711R8taxm8sV7wcAOvT0HhrZxv8zASAzoz2lpZ19QAil6uJ52IOkq4ij/zGy7VBXEgPA==
aws-sdk@^2.637.0:
version "2.637.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.637.0.tgz#810e25e53acf2250d35fc74498f9d4492e154217"
integrity sha512-e7EYX5rNtQyEaleQylUtLSNKXOmvOwfifQ4bYkfF80mFsVI3DSydczLHXrqPzXoEJaS/GI/9HqVnlQcPs6Q3ew==
dependencies:
buffer "4.9.1"
events "1.1.1"
ieee754 "1.1.8"
ieee754 "1.1.13"
jmespath "0.15.0"
querystring "0.2.0"
sax "1.2.1"
@@ -5752,10 +5752,10 @@ icss-utils@^2.1.0:
dependencies:
postcss "^6.0.1"

ieee754@1.1.8, ieee754@^1.1.4:
version "1.1.8"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4"
integrity sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=
ieee754@1.1.13, ieee754@^1.1.4:
version "1.1.13"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==

iferr@^0.1.5:
version "0.1.5"