Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-12-14 15:13:45 +00:00
parent 89a0c1fa66
commit d7e72d98df
41 changed files with 399 additions and 174 deletions

View File

@ -42,7 +42,7 @@ Are there any other stages or teams involved that need to be kept in the loop?
the feature can be officially announced in a release blog post.
- [ ] `/chatops run auto_deploy status <merge-commit-of-cleanup-mr>`
- [ ] Close [the feature issue](ISSUE LINK) to indicate the feature will be released in the current milestone.
- [ ] If not already done, clean up the feature flag from all environments by running these chatops command in `#production` channel: `/chatops run feature delete <feature-flag-name> --dev --ops --pre --staging --staging-ref --production`
- [ ] If not already done, clean up the feature flag from all environments by running this chatops command in `#production` channel: `/chatops run feature delete <feature-flag-name> --dev --pre --staging --staging-ref --production`
- [ ] Close this rollout issue.

View File

@ -32,10 +32,10 @@ Note: Please make sure to run the chatops commands in the Slack channel that get
- Verify the MR with the feature flag is merged to `master` and have been deployed to non-production environments with `/chatops run auto_deploy status <merge-commit-of-your-feature>`
<!-- Delete Incremental roll out if it is not relevant to this deploy -->
- [ ] Deploy the feature flag at a percentage (recommended percentage: 50%) with `/chatops run feature set <feature-flag-name> <rollout-percentage> --actors --dev --staging --staging-ref`
- [ ] Deploy the feature flag at a percentage (recommended percentage: 50%) with `/chatops run feature set <feature-flag-name> <rollout-percentage> --actors --dev --pre --staging --staging-ref`
- [ ] Monitor that the error rates did not increase (repeat with a different percentage as necessary).
<!-- End of block for deletes -->
- [ ] Enable the feature globally on non-production environments with `/chatops run feature set <feature-flag-name> true --dev --staging --staging-ref`
- [ ] Enable the feature globally on non-production environments with `/chatops run feature set <feature-flag-name> true --dev --pre --staging --staging-ref`
- [ ] Verify that the feature works as expected.
The best environment to validate the feature in is [`staging-canary`](https://about.gitlab.com/handbook/engineering/infrastructure/environments/#staging-canary)
as this is the first environment deployed to. Make sure you are [configured to use canary](https://next.gitlab.com/).
@ -103,7 +103,7 @@ To do so, follow these steps:
- [ ] Ensure that the default-enabling MR has been included in the release package.
If the merge request was deployed before [the monthly release was tagged](https://about.gitlab.com/handbook/engineering/releases/#self-managed-releases-1),
the feature can be officially announced in a release blog post: `/chatops run release check <merge-request-url> <milestone>`
- [ ] Consider cleaning up the feature flag from all environments by running these chatops command in `#production` channel. Otherwise these settings may override the default enabled: `/chatops run feature delete <feature-flag-name> --dev --staging --staging-ref --production`
- [ ] Consider cleaning up the feature flag from all environments by running this chatops command in `#production` channel. Otherwise these settings may override the default enabled: `/chatops run feature delete <feature-flag-name> --dev --pre --staging --staging-ref --production`
- [ ] Close [the feature issue][main-issue] to indicate the feature will be released in the current milestone.
- [ ] Set the next milestone to this rollout issue for scheduling [the flag removal](#release-the-feature).
- [ ] (Optional) You can [create a separate issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new?issuable_template=Feature%20Flag%20Cleanup) for scheduling the steps below to [Release the feature](#release-the-feature).
@ -132,7 +132,7 @@ You can either [create a follow-up issue for Feature Flag Cleanup](https://gitla
If the merge request was deployed before [the monthly release was tagged](https://about.gitlab.com/handbook/engineering/releases/#self-managed-releases-1),
the feature can be officially announced in a release blog post: `/chatops run release check <merge-request-url> <milestone>`
- [ ] Close [the feature issue][main-issue] to indicate the feature will be released in the current milestone.
- [ ] Clean up the feature flag from all environments by running these chatops command in `#production` channel: `/chatops run feature delete <feature-flag-name> --dev --ops --pre --staging --staging-ref --production`
- [ ] Clean up the feature flag from all environments by running this chatops command in `#production` channel: `/chatops run feature delete <feature-flag-name> --dev --pre --staging --staging-ref --production`
- [ ] Close this rollout issue.
## Rollback Steps

View File

@ -333,6 +333,10 @@
"load_performance": {
"$ref": "#/definitions/string_file_list",
"markdownDescription": "Path to file or list of files with load performance testing report(s). [Learn More](https://docs.gitlab.com/ee/ci/yaml/artifacts_reports.html#artifactsreportsload_performance)."
},
"repository_xray": {
"$ref": "#/definitions/string_file_list",
"description": "Path to file or list of files with Repository X-Ray report(s)."
}
}
}

View File

@ -32,6 +32,25 @@ module Resolvers
super
end
# Returns a context-specific variant of this resolver class.
#
# :project level: no customization, returning the original resolver.
# :group level: returns an anonymous subclass that adds the optional
# project_ids argument and overrides #finder_params so the underlying
# finder is scoped to the group hierarchy instead of a single project.
# Any other context falls through the case and returns nil.
def self.[](context = :project)
case context
when :project
self
when :group
Class.new(self) do
argument :project_ids, [GraphQL::Types::ID],
required: false,
description: 'Project IDs within the group hierarchy.'
# Scope lookups to the group and all of its subgroups.
define_method :finder_params do
{ group_id: object.id, include_subgroups: true }
end
end
end
end
end
end
end

View File

@ -25,25 +25,6 @@ module Resolvers
def finder_params
{ project_id: object.project.id }
end
# :project level: no customization, returning the original resolver
# :group level: add the project_ids argument
def self.[](context = :project)
case context
when :project
self
when :group
Class.new(self) do
argument :project_ids, [GraphQL::Types::ID],
required: false,
description: 'Project IDs within the group hierarchy.'
define_method :finder_params do
{ group_id: object.id, include_subgroups: true }
end
end
end
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
module Resolvers
module Analytics
module CycleAnalytics
# Shared base class for value stream analytics metrics that count
# merge requests. Declares the GraphQL filter arguments common to
# merge-request-based metrics; the counting behavior itself is
# inherited from BaseCountResolver.
class BaseMergeRequestResolver < BaseCountResolver
type Types::Analytics::CycleAnalytics::MetricType, null: true
argument :assignee_usernames, [GraphQL::Types::String],
required: false,
description: 'Usernames of users assigned to the merge request.'
argument :author_username, GraphQL::Types::String,
required: false,
description: 'Username of the author of the merge request.'
argument :milestone_title, GraphQL::Types::String,
required: false,
description: 'Milestone applied to the merge request.'
argument :label_names, [GraphQL::Types::String],
required: false,
description: 'Labels applied to the merge request.'
end
end
end
end

View File

@ -28,22 +28,6 @@ module Resolvers
finder.execute.count
end
# :project level: no customization, returning the original resolver
# :group level: add the project_ids argument
def self.[](context = :project)
case context
when :project
self
when :group
Class.new(self) do
argument :project_ids, [GraphQL::Types::ID],
required: false,
description: 'Project IDs within the group hierarchy.'
end
end
end
end
end
end

View File

@ -6,8 +6,6 @@ module Resolvers
class ResourceResolver < BaseResolver
include Gitlab::Graphql::Authorize::AuthorizeResource
authorize :read_code
type ::Types::Ci::Catalog::ResourceType, null: true
argument :id, ::Types::GlobalIDType[::Ci::Catalog::Resource],
@ -28,19 +26,15 @@ module Resolvers
end
def resolve(id: nil, full_path: nil)
if full_path.present?
project = Project.find_by_full_path(full_path)
authorize!(project)
catalog_resource = if full_path.present?
::Ci::Catalog::Listing.new(current_user).find_resource(full_path: full_path)
else
::Ci::Catalog::Listing.new(current_user).find_resource(id: id.model_id)
end
raise_resource_not_available_error! unless project.catalog_resource
raise_resource_not_available_error! unless catalog_resource
project.catalog_resource
else
catalog_resource = ::Gitlab::Graphql::Lazy.force(GitlabSchema.find_by_gid(id))
authorize!(catalog_resource&.project)
catalog_resource
end
catalog_resource
end
end
end

View File

@ -58,7 +58,8 @@ module Ci
coverage_fuzzing: 'gl-coverage-fuzzing.json',
api_fuzzing: 'gl-api-fuzzing-report.json',
cyclonedx: 'gl-sbom.cdx.json',
annotations: 'gl-annotations.json'
annotations: 'gl-annotations.json',
repository_xray: 'gl-repository-xray.json'
}.freeze
INTERNAL_TYPES = {
@ -78,6 +79,7 @@ module Ci
lsif: :zip,
cyclonedx: :gzip,
annotations: :gzip,
repository_xray: :gzip,
# Security reports and license scanning reports are raw artifacts
# because they used to be fetched by the frontend, but this is not the case anymore.
@ -221,7 +223,8 @@ module Ci
cluster_image_scanning: 27, ## EE-specific
cyclonedx: 28, ## EE-specific
requirements_v2: 29, ## EE-specific
annotations: 30
annotations: 30,
repository_xray: 31 ## EE-specific
}
# `file_location` indicates where actual files are stored.

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/425272
milestone: '16.5'
type: development
group: group::source code
default_enabled: false
default_enabled: true

View File

@ -33,6 +33,8 @@
- 1
- - admin_emails
- 1
- - ai_store_repository_xray
- 1
- - analytics_code_review_metrics
- 1
- - analytics_devops_adoption_create_snapshot

12
db/docs/xray_reports.yml Normal file
View File

@ -0,0 +1,12 @@
---
table_name: xray_reports
classes:
- Projects::XrayReport
feature_categories:
- code_suggestions
description: The stored JSON output of repository X-Ray for a project
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/138220
milestone: '16.7'
gitlab_schema: gitlab_main_cell
sharding_key:
project_id: projects

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
# Creates the xray_reports table, which stores the JSON output of a
# repository X-Ray scan per project and language.
class AddProjectXrayReportModel < Gitlab::Database::Migration[2.2]
enable_lock_retries!
milestone '16.7'
def change
create_table :xray_reports, if_not_exists: true do |t|
# we create an index manually below, don't create one here
t.references :project, null: false, index: false, foreign_key: { on_delete: :cascade }
t.timestamps_with_timezone null: false
t.text :lang, null: false, limit: 255
t.jsonb :payload, null: false
t.binary :file_checksum, null: false
end
# Unique: at most one report per (project, lang) pair; the leading
# project_id column also serves plain per-project lookups.
add_index :xray_reports, [:project_id, :lang], unique: true, name: 'index_xray_reports_on_project_id_and_lang'
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
# Adds a plan limit column capping the size of the repository_xray CI
# job artifact (default 1 GiB).
class AddRepositoryXrayPlanLimit < Gitlab::Database::Migration[2.2]
enable_lock_retries!
milestone '16.7'
def change
add_column :plan_limits, :ci_max_artifact_size_repository_xray, :bigint, default: 1.gigabyte, null: false
end
end

View File

@ -0,0 +1 @@
0ee8b127bcdf66b2fe4639e8397d39052f61c16186b491039ce478f5b477a6a3

View File

@ -0,0 +1 @@
c5ccd76e1245234f4e78413d3afde72eb4c0c84ab723dffc6ac83abb619f43a9

View File

@ -21110,7 +21110,8 @@ CREATE TABLE plan_limits (
ci_job_annotations_size integer DEFAULT 81920 NOT NULL,
ci_job_annotations_num integer DEFAULT 20 NOT NULL,
file_size_limit_mb double precision DEFAULT 100.0 NOT NULL,
audit_events_amazon_s3_configurations integer DEFAULT 5 NOT NULL
audit_events_amazon_s3_configurations integer DEFAULT 5 NOT NULL,
ci_max_artifact_size_repository_xray bigint DEFAULT 1073741824 NOT NULL
);
CREATE SEQUENCE plan_limits_id_seq
@ -25765,6 +25766,26 @@ CREATE SEQUENCE x509_issuers_id_seq
ALTER SEQUENCE x509_issuers_id_seq OWNED BY x509_issuers.id;
CREATE TABLE xray_reports (
id bigint NOT NULL,
project_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
lang text NOT NULL,
payload jsonb NOT NULL,
file_checksum bytea NOT NULL,
CONSTRAINT check_6da5a3b473 CHECK ((char_length(lang) <= 255))
);
CREATE SEQUENCE xray_reports_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE xray_reports_id_seq OWNED BY xray_reports.id;
CREATE TABLE zentao_tracker_data (
id bigint NOT NULL,
integration_id bigint NOT NULL,
@ -27455,6 +27476,8 @@ ALTER TABLE ONLY x509_commit_signatures ALTER COLUMN id SET DEFAULT nextval('x50
ALTER TABLE ONLY x509_issuers ALTER COLUMN id SET DEFAULT nextval('x509_issuers_id_seq'::regclass);
ALTER TABLE ONLY xray_reports ALTER COLUMN id SET DEFAULT nextval('xray_reports_id_seq'::regclass);
ALTER TABLE ONLY zentao_tracker_data ALTER COLUMN id SET DEFAULT nextval('zentao_tracker_data_id_seq'::regclass);
ALTER TABLE ONLY zoekt_indexed_namespaces ALTER COLUMN id SET DEFAULT nextval('zoekt_indexed_namespaces_id_seq'::regclass);
@ -30162,6 +30185,9 @@ ALTER TABLE ONLY x509_commit_signatures
ALTER TABLE ONLY x509_issuers
ADD CONSTRAINT x509_issuers_pkey PRIMARY KEY (id);
ALTER TABLE ONLY xray_reports
ADD CONSTRAINT xray_reports_pkey PRIMARY KEY (id);
ALTER TABLE ONLY zentao_tracker_data
ADD CONSTRAINT zentao_tracker_data_pkey PRIMARY KEY (id);
@ -35271,6 +35297,8 @@ CREATE INDEX index_x509_commit_signatures_on_x509_certificate_id ON x509_commit_
CREATE INDEX index_x509_issuers_on_subject_key_identifier ON x509_issuers USING btree (subject_key_identifier);
CREATE UNIQUE INDEX index_xray_reports_on_project_id_and_lang ON xray_reports USING btree (project_id, lang);
CREATE INDEX index_zentao_tracker_data_on_integration_id ON zentao_tracker_data USING btree (integration_id);
CREATE INDEX index_zoekt_indexed_namespaces_on_namespace_id ON zoekt_indexed_namespaces USING btree (namespace_id);
@ -38672,6 +38700,9 @@ ALTER TABLE ONLY reviews
ALTER TABLE ONLY draft_notes
ADD CONSTRAINT fk_rails_2a8dac9901 FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY xray_reports
ADD CONSTRAINT fk_rails_2b13fbecf9 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY dependency_proxy_image_ttl_group_policies
ADD CONSTRAINT fk_rails_2b1896d021 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;

View File

@ -814,7 +814,7 @@ or add fault tolerance and redundancy, you may be
looking at removing dependencies on block or network file systems.
See the following additional guides:
1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#moving-the-home-directory-for-a-user) is on local disk.
1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#move-the-home-directory-for-a-user) is on local disk.
1. Configure [database lookup of SSH keys](operations/fast_ssh_key_lookup.md)
to eliminate the need for a shared `authorized_keys` file.
1. [Prevent local disk usage for job logs](job_logs.md#prevent-local-disk-usage).

View File

@ -20466,6 +20466,24 @@ Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
| <a id="groupvaluestreamanalyticsflowmetricsleadtimeto"></a>`to` | [`Time!`](#time) | Timestamp marking the end date and time. |
| <a id="groupvaluestreamanalyticsflowmetricsleadtimeweight"></a>`weight` | [`Int`](#int) | Weight applied to the issue. |
##### `GroupValueStreamAnalyticsFlowMetrics.timeToMerge`
Median time from merge request creation to merge request merged.
Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="groupvaluestreamanalyticsflowmetricstimetomergeassigneeusernames"></a>`assigneeUsernames` | [`[String!]`](#string) | Usernames of users assigned to the merge request. |
| <a id="groupvaluestreamanalyticsflowmetricstimetomergeauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author of the merge request. |
| <a id="groupvaluestreamanalyticsflowmetricstimetomergefrom"></a>`from` | [`Time!`](#time) | Timestamp marking the start date and time. |
| <a id="groupvaluestreamanalyticsflowmetricstimetomergelabelnames"></a>`labelNames` | [`[String!]`](#string) | Labels applied to the merge request. |
| <a id="groupvaluestreamanalyticsflowmetricstimetomergemilestonetitle"></a>`milestoneTitle` | [`String`](#string) | Milestone applied to the merge request. |
| <a id="groupvaluestreamanalyticsflowmetricstimetomergeprojectids"></a>`projectIds` | [`[ID!]`](#id) | Project IDs within the group hierarchy. |
| <a id="groupvaluestreamanalyticsflowmetricstimetomergeto"></a>`to` | [`Time!`](#time) | Timestamp marking the end date and time. |
### `GroupWikiRepositoryRegistry`
Represents the Geo sync and verification state of a group wiki repository.
@ -25914,6 +25932,23 @@ Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
| <a id="projectvaluestreamanalyticsflowmetricsleadtimeto"></a>`to` | [`Time!`](#time) | Timestamp marking the end date and time. |
| <a id="projectvaluestreamanalyticsflowmetricsleadtimeweight"></a>`weight` | [`Int`](#int) | Weight applied to the issue. |
##### `ProjectValueStreamAnalyticsFlowMetrics.timeToMerge`
Median time from merge request creation to merge request merged.
Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="projectvaluestreamanalyticsflowmetricstimetomergeassigneeusernames"></a>`assigneeUsernames` | [`[String!]`](#string) | Usernames of users assigned to the merge request. |
| <a id="projectvaluestreamanalyticsflowmetricstimetomergeauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author of the merge request. |
| <a id="projectvaluestreamanalyticsflowmetricstimetomergefrom"></a>`from` | [`Time!`](#time) | Timestamp marking the start date and time. |
| <a id="projectvaluestreamanalyticsflowmetricstimetomergelabelnames"></a>`labelNames` | [`[String!]`](#string) | Labels applied to the merge request. |
| <a id="projectvaluestreamanalyticsflowmetricstimetomergemilestonetitle"></a>`milestoneTitle` | [`String`](#string) | Milestone applied to the merge request. |
| <a id="projectvaluestreamanalyticsflowmetricstimetomergeto"></a>`to` | [`Time!`](#time) | Timestamp marking the end date and time. |
### `ProjectWikiRepositoryRegistry`
Represents the Geo replication and verification state of a project_wiki_repository.
@ -30502,6 +30537,7 @@ Iteration ID wildcard values.
| <a id="jobartifactfiletypemetrics_referee"></a>`METRICS_REFEREE` | METRICS REFEREE job artifact file type. |
| <a id="jobartifactfiletypenetwork_referee"></a>`NETWORK_REFEREE` | NETWORK REFEREE job artifact file type. |
| <a id="jobartifactfiletypeperformance"></a>`PERFORMANCE` | PERFORMANCE job artifact file type. |
| <a id="jobartifactfiletyperepository_xray"></a>`REPOSITORY_XRAY` | REPOSITORY XRAY job artifact file type. |
| <a id="jobartifactfiletyperequirements"></a>`REQUIREMENTS` | REQUIREMENTS job artifact file type. |
| <a id="jobartifactfiletyperequirements_v2"></a>`REQUIREMENTS_V2` | REQUIREMENTS V2 job artifact file type. |
| <a id="jobartifactfiletypesast"></a>`SAST` | SAST job artifact file type. |

View File

@ -63,7 +63,7 @@ Supported attributes:
| `health_status` **(ULTIMATE ALL)** | string | No | Return issues with the specified `health_status`. _([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/370721) in GitLab 15.4)._ In [GitLab 15.5 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/370721), `None` returns issues with no health status assigned, and `Any` returns issues with a health status assigned.
| `iids[]` | integer array | No | Return only the issues having the given `iid`. |
| `in` | string | No | Modify the scope of the `search` attribute. `title`, `description`, or a string joining them with comma. Default is `title,description`. |
| `issue_type` | string | No | Filter to a given type of issue. One of `issue`, `incident`, or `test_case`. |
| `issue_type` | string | No | Filter to a given type of issue. One of `issue`, `incident`, `test_case`, or `task`. |
| `iteration_id` **(PREMIUM ALL)** | integer | No | Return issues assigned to the given iteration ID. `None` returns issues that do not belong to an iteration. `Any` returns issues that belong to an iteration. Mutually exclusive with `iteration_title`. |
| `iteration_title` **(PREMIUM ALL)** | string | No | Return issues assigned to the iteration with the given title. Similar to `iteration_id` and mutually exclusive with `iteration_id`. |
| `labels` | string | No | Comma-separated list of label names, issues must have all labels to be returned. `None` lists all issues with no labels. `Any` lists all issues with at least one label. `No+Label` (Deprecated) lists all issues with no labels. Predefined names are case-insensitive. |
@ -295,7 +295,7 @@ Supported attributes:
| `due_date` | string | No | Return issues that have no due date, are overdue, or whose due date is this week, this month, or between two weeks ago and next month. Accepts: `0` (no due date), `any`, `today`, `tomorrow`, `overdue`, `week`, `month`, `next_month_and_previous_two_weeks`. |
| `epic_id` **(PREMIUM ALL)** | integer | No | Return issues associated with the given epic ID. `None` returns issues that are not associated with an epic. `Any` returns issues that are associated with an epic. |
| `iids[]` | integer array | No | Return only the issues having the given `iid`. |
| `issue_type` | string | No | Filter to a given type of issue. One of `issue`, `incident`, or `test_case`. |
| `issue_type` | string | No | Filter to a given type of issue. One of `issue`, `incident`, `test_case`, or `task`. |
| `iteration_id` **(PREMIUM ALL)** | integer | No | Return issues assigned to the given iteration ID. `None` returns issues that do not belong to an iteration. `Any` returns issues that belong to an iteration. Mutually exclusive with `iteration_title`. |
| `iteration_title` **(PREMIUM ALL)** | string | No | Return issues assigned to the iteration with the given title. Similar to `iteration_id` and mutually exclusive with `iteration_id`. |
| `labels` | string | No | Comma-separated list of label names, issues must have all labels to be returned. `None` lists all issues with no labels. `Any` lists all issues with at least one label. `No+Label` (Deprecated) lists all issues with no labels. Predefined names are case-insensitive. |
@ -502,7 +502,7 @@ Supported attributes:
| `due_date` | string | No | Return issues that have no due date, are overdue, or whose due date is this week, this month, or between two weeks ago and next month. Accepts: `0` (no due date), `any`, `today`, `tomorrow`, `overdue`, `week`, `month`, `next_month_and_previous_two_weeks`. |
| `epic_id` **(PREMIUM ALL)** | integer | No | Return issues associated with the given epic ID. `None` returns issues that are not associated with an epic. `Any` returns issues that are associated with an epic. |
| `iids[]` | integer array | No | Return only the issues having the given `iid`. |
| `issue_type` | string | No | Filter to a given type of issue. One of `issue`, `incident`, or `test_case`. |
| `issue_type` | string | No | Filter to a given type of issue. One of `issue`, `incident`, `test_case`, or `task`. |
| `iteration_id` **(PREMIUM ALL)** | integer | No | Return issues assigned to the given iteration ID. `None` returns issues that do not belong to an iteration. `Any` returns issues that belong to an iteration. Mutually exclusive with `iteration_title`. |
| `iteration_title` **(PREMIUM ALL)** | string | No | Return issues assigned to the iteration with the given title. Similar to `iteration_id` and mutually exclusive with `iteration_id`. |
| `labels` | string | No | Comma-separated list of label names, issues must have all labels to be returned. `None` lists all issues with no labels. `Any` lists all issues with at least one label. `No+Label` (Deprecated) lists all issues with no labels. Predefined names are case-insensitive. |
@ -1030,7 +1030,7 @@ Supported attributes:
| `epic_id` **(PREMIUM ALL)** | integer | No | ID of the epic to add the issue to. Valid values are greater than or equal to 0. |
| `epic_iid` **(PREMIUM ALL)** | integer | No | IID of the epic to add the issue to. Valid values are greater than or equal to 0. (deprecated, [scheduled for removal](https://gitlab.com/gitlab-org/gitlab/-/issues/35157) in API version 5). |
| `iid` | integer/string | No | The internal ID of the project's issue (requires administrator or project owner rights). |
| `issue_type` | string | No | The type of issue. One of `issue`, `incident`, or `test_case`. Default is `issue`. |
| `issue_type` | string | No | The type of issue. One of `issue`, `incident`, `test_case`, or `task`. Default is `issue`. |
| `labels` | string | No | Comma-separated label names for an issue. |
| `merge_request_to_resolve_discussions_of` | integer | No | The IID of a merge request in which to resolve all issues. This fills out the issue with a default description and mark all discussions as resolved. When passing a description or title, these values take precedence over the default values.|
| `milestone_id` | integer | No | The global ID of a milestone to assign issue. To find the `milestone_id` associated with a milestone, view an issue with the milestone assigned and [use the API](#single-project-issue) to retrieve the issue's details. |
@ -1201,7 +1201,7 @@ Supported attributes:
| `due_date` | string | No | The due date. Date time string in the format `YYYY-MM-DD`, for example `2016-03-11`. |
| `epic_id` **(PREMIUM ALL)** | integer | No | ID of the epic to add the issue to. Valid values are greater than or equal to 0. |
| `epic_iid` **(PREMIUM ALL)** | integer | No | IID of the epic to add the issue to. Valid values are greater than or equal to 0. (deprecated, [scheduled for removal](https://gitlab.com/gitlab-org/gitlab/-/issues/35157) in API version 5). |
| `issue_type` | string | No | Updates the type of issue. One of `issue`, `incident`, or `test_case`. |
| `issue_type` | string | No | Updates the type of issue. One of `issue`, `incident`, `test_case`, or `task`. |
| `labels` | string | No | Comma-separated label names for an issue. Set to an empty string to unassign all labels. |
| `milestone_id` | integer | No | The global ID of a milestone to assign the issue to. Set to `0` or provide an empty value to unassign a milestone.|
| `remove_labels`| string | No | Comma-separated label names to remove from an issue. |

View File

@ -374,7 +374,7 @@ This resource has been moved permanently to https://gitlab.example.com/api/v4/pr
GitLab supports the following pagination methods:
- Offset-based pagination. The default method and available on all endpoints except,
in GitLab 16.5 and later, the `\users` endpoint.
in GitLab 16.5 and later, the `users` endpoint.
- Keyset-based pagination. Added to selected endpoints but being
[progressively rolled out](https://gitlab.com/groups/gitlab-org/-/epics/2039).
@ -383,7 +383,7 @@ For large collections, you should use keyset pagination
### Offset-based pagination
> The `\users` endpoint was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/426547) for offset-based pagination in GitLab 16.5 and is planned for removal in 17.0. This change is a breaking change. Use keyset-based pagination for this endpoint instead.
> The `users` endpoint was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/426547) for offset-based pagination in GitLab 16.5 and is planned for removal in 17.0. This change is a breaking change. Use keyset-based pagination for this endpoint instead.
Sometimes, the returned result spans many pages. When listing resources, you can
pass the following parameters:

View File

@ -292,3 +292,8 @@ These errors can happen if the following are both true:
To resolve this issue, add any projects with CI/CD jobs that fetch images from the container
registry to the target project's [job token allowlist](jobs/ci_job_token.md#allow-access-to-your-project-with-a-job-token).
These errors might also happen when trying to use a [project access token](../user/project/settings/project_access_tokens.md)
to access images in another project. Project access tokens are scoped to one project,
and therefore cannot access images in other projects. You must use [a different token type](../security/token_overview.md)
with wider scope.

View File

@ -312,6 +312,10 @@ artifact and existing [requirements](../../user/project/requirements/index.md) a
GitLab can display the results of one or more reports in the
[project requirements](../../user/project/requirements/index.md#view-a-requirement).
## `artifacts:reports:repository_xray` **(ULTIMATE ALL)**
The `repository_xray` report collects information about your repository for use by AI in code suggestions.
## `artifacts:reports:sast`
> [Moved](https://gitlab.com/groups/gitlab-org/-/epics/2098) from GitLab Ultimate to GitLab Free in 13.3.

View File

@ -11,31 +11,41 @@ that fall under the [Source Code group](https://about.gitlab.com/handbook/produc
of the [Create stage](https://about.gitlab.com/handbook/product/categories/#create-stage)
of the [DevOps lifecycle](https://about.gitlab.com/handbook/product/categories/#devops-stages).
We interface with the Gitaly and Code Review teams. The features
we work with are listed on the
The Source Code Management team interfaces with the Gitaly and Code Review teams and works across three codebases: Workhorse, GitLab Shell and GitLab Rails.
## Source Code Features Reference
Features owned by the Source Code Management group are listed on the
[Features by Group Page](https://about.gitlab.com/handbook/product/categories/features/#createsource-code-group).
The team works across three codebases: Workhorse, GitLab Shell and GitLab Rails.
### Code Owners
## Workhorse
Source Code Management shares ownership of Code Owners with the Code Review group.
[GitLab Workhorse](../../workhorse/index.md) is a smart reverse proxy for GitLab. It handles "large" HTTP
requests such as file downloads, file uploads, `git push`, `git pull` and `git` archive downloads.
- [Feature homepage](../../../user/project/codeowners/index.md)
- [Developer Reference](../../code_owners/index.md)
Workhorse itself is not a feature, but there are several features in GitLab
that would not work efficiently without Workhorse.
### Approval Rules
## GitLab Shell
- [Approval Rules](../../merge_request_concepts/approval_rules.md)
GitLab Shell handles Git SSH sessions for GitLab and modifies the list of authorized keys.
For more information, refer to the [GitLab Shell documentation](../../gitlab_shell/index.md).
### Protected Branches
To learn about the reasoning behind our creation of `gitlab-sshd`, read the blog post
[Why we implemented our own SSHD solution](https://about.gitlab.com/blog/2022/08/17/why-we-have-implemented-our-own-sshd-solution-on-gitlab-sass/).
Details about Protected Branches models can be found in the [Code Owners](../../code_owners/index.md#related-models) technical reference page.
## CODEOWNERS
### Repositories
Source Code Management shares ownership of [Code Owners](../../code_owners/index.md) with the Code Review group.
- [Project Repository Storage Moves](../../repository_storage_moves/index.md)
### Project Templates
- [Custom group-level project templates development guidelines](../../project_templates/index.md)
### Git LFS
- [Git LFS Development guidelines](../../lfs.md)
## Technical Stack
## GitLab Rails
@ -58,3 +68,19 @@ The `:source_code_management` annotation indicates which code belongs to the Sou
group in the Rails codebase. The annotated objects are presented on
[this page](https://gitlab-com.gitlab.io/gl-infra/platform/stage-groups-index/source-code.html) along
with the [Error Budgets dashboards](https://dashboards.gitlab.net/d/stage-groups-source_code/stage-groups3a-source-code3a-group-dashboard?orgId=1).
## GitLab Workhorse
[GitLab Workhorse](../../workhorse/index.md) is a smart reverse proxy for GitLab. It handles "large" HTTP
requests such as file downloads, file uploads, `git push`, `git pull` and `git` archive downloads.
Workhorse itself is not a feature, but there are several features in GitLab
that would not work efficiently without Workhorse.
## GitLab Shell
GitLab Shell handles Git SSH sessions for GitLab and modifies the list of authorized keys.
For more information, refer to the [GitLab Shell documentation](../../gitlab_shell/index.md).
To learn about the reasoning behind our creation of `gitlab-sshd`, read the blog post
[Why we implemented our own SSHD solution](https://about.gitlab.com/blog/2022/08/17/why-we-have-implemented-our-own-sshd-solution-on-gitlab-sass/).

View File

@ -509,5 +509,5 @@ record still exists in the database that the flag was deployed to.
The record can be deleted once the MR is deployed to all the environments:
```shell
/chatops run feature delete <feature-flag-name> --dev --ops --pre --staging --staging-ref --production
/chatops run feature delete <feature-flag-name> --dev --pre --staging --staging-ref --production
```

View File

@ -17,7 +17,7 @@ module Gitlab
dast performance browser_performance load_performance license_scanning metrics lsif
dotenv terraform accessibility
coverage_fuzzing api_fuzzing cluster_image_scanning
requirements requirements_v2 coverage_report cyclonedx annotations].freeze
requirements requirements_v2 coverage_report cyclonedx annotations repository_xray].freeze
attributes ALLOWED_KEYS
@ -51,6 +51,7 @@ module Gitlab
validates :requirements_v2, array_of_strings_or_string: true
validates :cyclonedx, array_of_strings_or_string: true
validates :annotations, array_of_strings_or_string: true
validates :repository_xray, array_of_strings_or_string: true
end
end

View File

@ -168,19 +168,6 @@ module QA
{ name: "add_milestone", label: "0.0.1" }
]
)
# TODO: reenable once https://gitlab.com/gitlab-org/gitlab/-/issues/386714 fixed
# currently this doesn't work as expected if reviewer is not matched by public email
# event for assigning approver is created with reviewer being user doing import but mr actually doesn't
# contain reviewers or the approved state
#
# reviews = merge_request.reviews.map do |review|
# {
# id: review.dig(:user, :id),
# username: review.dig(:user, :username),
# state: review[:state]
# }
# end
# expect(reviews).to eq([{ id: user.id, username: user.username, state: "approved" }])
end
def verify_release_import

View File

@ -8,6 +8,7 @@ import {
import { last } from 'lodash';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { getUsers, getGroups, getDeployKeys } from '~/projects/settings/api/access_dropdown_api';
import AccessDropdown, { i18n } from '~/projects/settings/components/access_dropdown.vue';
@ -77,6 +78,7 @@ describe('Access Level Dropdown', () => {
label,
disabled,
preselectedItems,
stubs = {},
} = {}) => {
wrapper = shallowMountExtended(AccessDropdown, {
propsData: {
@ -90,6 +92,7 @@ describe('Access Level Dropdown', () => {
stubs: {
GlSprintf,
GlDropdown,
...stubs,
},
});
};
@ -373,15 +376,22 @@ describe('Access Level Dropdown', () => {
});
describe('on dropdown open', () => {
const focusInput = jest.fn();
beforeEach(() => {
createComponent();
createComponent({
stubs: {
GlSearchBoxByType: stubComponent(GlSearchBoxByType, {
methods: { focusInput },
}),
},
});
});
it('should set the search input focus', () => {
wrapper.vm.$refs.search.focusInput = jest.fn();
findDropdown().vm.$emit('shown');
expect(wrapper.vm.$refs.search.focusInput).toHaveBeenCalled();
expect(focusInput).toHaveBeenCalled();
});
});

View File

@ -14,13 +14,14 @@ Vue.use(Vuex);
describe('DropdownContentsCreateView', () => {
let wrapper;
let store;
const colors = Object.keys(mockSuggestedColors).map((color) => ({
[color]: mockSuggestedColors[color],
}));
const createComponent = (initialState = mockConfig) => {
const store = new Vuex.Store(labelSelectModule());
store = new Vuex.Store(labelSelectModule());
store.dispatch('setInitialState', initialState);
wrapper = shallowMountExtended(DropdownContentsCreateView, {
@ -47,7 +48,7 @@ describe('DropdownContentsCreateView', () => {
it('returns `true` when `labelCreateInProgress` is true', async () => {
await findColorSelectorInput().vm.$emit('input', '#ff0000');
await findLabelTitleInput().vm.$emit('input', 'Foo');
wrapper.vm.$store.dispatch('requestCreateLabel');
store.dispatch('requestCreateLabel');
await nextTick();
@ -81,7 +82,6 @@ describe('DropdownContentsCreateView', () => {
describe('getColorName', () => {
it('returns color name from color object', () => {
expect(findAllLinks().at(0).attributes('title')).toBe(Object.values(colors[0]).pop());
expect(wrapper.vm.getColorName(colors[0])).toBe(Object.values(colors[0]).pop());
});
});
@ -97,20 +97,17 @@ describe('DropdownContentsCreateView', () => {
describe('handleCreateClick', () => {
it('calls action `createLabel` with object containing `labelTitle` & `selectedColor`', async () => {
jest.spyOn(wrapper.vm, 'createLabel').mockImplementation();
jest.spyOn(store, 'dispatch').mockImplementation();
await findColorSelectorInput().vm.$emit('input', '#ff0000');
await findLabelTitleInput().vm.$emit('input', 'Foo');
findCreateClickButton().vm.$emit('click');
await nextTick();
expect(wrapper.vm.createLabel).toHaveBeenCalledWith(
expect.objectContaining({
title: 'Foo',
color: '#ff0000',
}),
);
expect(store.dispatch).toHaveBeenCalledWith('createLabel', {
title: 'Foo',
color: '#ff0000',
});
});
});
});
@ -186,7 +183,7 @@ describe('DropdownContentsCreateView', () => {
});
it('shows gl-loading-icon within create button element when `labelCreateInProgress` is `true`', async () => {
wrapper.vm.$store.dispatch('requestCreateLabel');
store.dispatch('requestCreateLabel');
await nextTick();
const loadingIconEl = wrapper.find('.dropdown-actions').findComponent(GlLoadingIcon);

View File

@ -6,6 +6,7 @@ import setIssueTimeEstimateWithoutErrors from 'test_fixtures/graphql/issue_set_t
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { stubComponent } from 'helpers/stub_component';
import SetTimeEstimateForm from '~/sidebar/components/time_tracking/set_time_estimate_form.vue';
import issueSetTimeEstimateMutation from '~/sidebar/queries/issue_set_time_estimate.mutation.graphql';
@ -75,10 +76,13 @@ describe('Set Time Estimate Form', () => {
timeTracking,
},
apolloProvider: createMockApollo([[issueSetTimeEstimateMutation, mutationResolverMock]]),
stubs: {
GlModal: stubComponent(GlModal, {
methods: { close: modalCloseMock },
}),
},
});
wrapper.vm.$refs.modal.close = modalCloseMock;
findModal().vm.$emit('show');
await nextTick();
};

View File

@ -7,7 +7,7 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
let_it_be(:namespace) { create(:group) }
let_it_be(:project) { create(:project, :private, namespace: namespace) }
let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
let_it_be(:resource) { create(:ci_catalog_resource, :published, project: project) }
let_it_be(:user) { create(:user) }
describe '#resolve' do
@ -20,7 +20,7 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
context 'when resource is found' do
it 'returns a single CI/CD Catalog resource' do
result = resolve(described_class, ctx: { current_user: user },
args: { id: resource.to_global_id.to_s })
args: { id: resource.to_global_id })
expect(result.id).to eq(resource.id)
expect(result.class).to eq(Ci::Catalog::Resource)
@ -30,7 +30,9 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
context 'when resource is not found' do
it 'raises ResourceNotAvailable error' do
result = resolve(described_class, ctx: { current_user: user },
args: { id: "gid://gitlab/Ci::Catalog::Resource/not-a-real-id" })
args: { id: GlobalID.new(
::Gitlab::GlobalId.build(model_name: '::Ci::Catalog::Resource', id: "not-a-real-id")
) })
expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
end
@ -40,7 +42,7 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
context 'when user is not authorised to view the resource' do
it 'raises ResourceNotAvailable error' do
result = resolve(described_class, ctx: { current_user: user },
args: { id: resource.to_global_id.to_s })
args: { id: resource.to_global_id })
expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
end
@ -115,7 +117,7 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
expect_graphql_error_to_be_created(::Gitlab::Graphql::Errors::ArgumentError,
"Exactly one of 'id' or 'full_path' arguments is required.") do
resolve(described_class, ctx: { current_user: user },
args: { full_path: resource.project.full_path, id: resource.to_global_id.to_s })
args: { full_path: resource.project.full_path, id: resource.to_global_id })
end
end
end

View File

@ -49,6 +49,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports, feature_category: :pipeline_c
:accessibility | 'gl-accessibility.json'
:cyclonedx | 'gl-sbom.cdx.zip'
:annotations | 'gl-annotations.json'
:repository_xray | 'gl-repository-xray.json'
end
with_them do

View File

@ -422,6 +422,7 @@ builds:
- job_artifacts_cluster_image_scanning
- job_artifacts_cyclonedx
- job_artifacts_requirements_v2
- job_artifacts_repository_xray
- runner_manager
- runner_manager_build
- runner_session
@ -833,6 +834,7 @@ project:
- target_branch_rules
- organization
- dora_performance_scores
- xray_reports
award_emoji:
- awardable
- user

View File

@ -76,7 +76,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
job_artifacts_requirements_v2
job_artifacts_requirements_v2 job_artifacts_repository_xray
job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx
job_annotations job_artifacts_annotations].freeze
end

View File

@ -22,7 +22,7 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
)
end
let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
let_it_be(:resource) { create(:ci_catalog_resource, :published, project: project) }
let(:query) do
<<~GQL

View File

@ -9,6 +9,8 @@ module Support
class RSpecFormatter < RSpec::Core::Formatters::BaseFormatter
include Tooling::Helpers::DurationFormatter
TIME_LIMIT_IN_MINUTES = 80
RSpec::Core::Formatters.register self, :example_group_started, :example_group_finished
def start(_notification)
@ -20,6 +22,11 @@ module Support
end
def example_group_started(notification)
if @last_elapsed_seconds && @last_elapsed_seconds > TIME_LIMIT_IN_MINUTES * 60
RSpec::Expectations.fail_with(
"Rspec suite is exceeding the #{TIME_LIMIT_IN_MINUTES} minute limit and is forced to exit with error.")
end
if @group_level == 0
@current_group_start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
file_path = spec_file_path(notification)
@ -37,7 +44,8 @@ module Support
time_now = Process.clock_gettime(Process::CLOCK_MONOTONIC)
actual_duration = time_now - @current_group_start_time
output.puts "\n# [RSpecRunTime] #{file_path} took #{readable_duration(actual_duration)}. " \
output.puts "\n# [RSpecRunTime] Finishing example group #{file_path}. " \
"It took #{readable_duration(actual_duration)}. " \
"#{expected_run_time(file_path)}"
end

View File

@ -178,7 +178,7 @@ RSpec.shared_examples 'submits edit runner form' do
context 'when a runner is updated', :js do
before do
fill_in s_('Runners|Runner description'), with: 'new-runner-description'
fill_in s_('Runners|Runner description'), with: 'new-runner-description', fill_options: { clear: :backspace }
click_on _('Save changes')
end

View File

@ -38,6 +38,15 @@ type API struct {
Version string
}
// PreAuthorizeFixedPathError is returned by API.PreAuthorizeFixedPath when the
// internal API replied with a failure and no decodable API response. It keeps
// the upstream HTTP status so callers (e.g. the upload interceptor) can relay
// it to the client instead of mapping everything to 500.
type PreAuthorizeFixedPathError struct {
	StatusCode int    // numeric HTTP status code of the failed pre-authorize response
	Status     string // full status text of that response, e.g. "401 Unauthorized"
}
// Error implements the error interface. The text deliberately matches the
// fmt.Errorf message this type replaced ("no api response: status <code>");
// the richer Status string is carried in the struct, not in the message.
func (p *PreAuthorizeFixedPathError) Error() string {
	return fmt.Sprintf("no api response: status %d", p.StatusCode)
}
var (
requestsCounter = promauto.NewCounterVec(
prometheus.CounterOpts{
@ -326,7 +335,7 @@ func (api *API) PreAuthorizeFixedPath(r *http.Request, method string, path strin
failureResponse.Body.Close()
if apiResponse == nil {
return nil, fmt.Errorf("no api response: status %d", failureResponse.StatusCode)
return nil, &PreAuthorizeFixedPathError{StatusCode: failureResponse.StatusCode, Status: failureResponse.Status}
}
return apiResponse, nil

View File

@ -47,6 +47,59 @@ func TestGetGeoProxyDataForResponses(t *testing.T) {
}
}
// TestPreAuthorizeFixedPath_OK verifies the happy path: the original request's
// headers and query string are forwarded to the fixed API path, and a
// successful JSON payload is decoded into the API response.
func TestPreAuthorizeFixedPath_OK(t *testing.T) {
	var (
		seenHeaders http.Header
		seenQuery   url.Values
	)

	// Fake internal API: record what arrived, then answer with a minimal
	// successful pre-authorization payload.
	upstream := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path != "/my/api/path" {
			return
		}
		seenHeaders = r.Header
		seenQuery = r.URL.Query()
		w.Header().Set("Content-Type", ResponseContentType)
		io.WriteString(w, `{"TempPath":"HELLO!!"}`)
	}))
	defer upstream.Close()

	req, err := http.NewRequest("GET", "/original/request/path?q1=Q1&q2=Q2", nil)
	require.NoError(t, err)
	req.Header.Set("key1", "value1")

	apiClient := NewAPI(helper.URLMustParse(upstream.URL), "123", http.DefaultTransport)
	resp, err := apiClient.PreAuthorizeFixedPath(req, "POST", "/my/api/path")
	require.NoError(t, err)

	require.Equal(t, "value1", seenHeaders.Get("key1"), "original headers must propagate")
	require.Equal(t, url.Values{"q1": []string{"Q1"}, "q2": []string{"Q2"}}, seenQuery,
		"original query must propagate")
	require.Equal(t, "HELLO!!", resp.TempPath, "sanity check: successful API call")
}
// TestPreAuthorizeFixedPath_Unauthorized verifies that a non-2xx upstream
// reply without a usable API response surfaces as a
// *PreAuthorizeFixedPathError carrying the upstream status.
func TestPreAuthorizeFixedPath_Unauthorized(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path != "/my/api/path" {
			return
		}
		w.WriteHeader(http.StatusUnauthorized)
	}))
	defer ts.Close()

	req, err := http.NewRequest("GET", "/original/request/path?q1=Q1&q2=Q2", nil)
	require.NoError(t, err)

	api := NewAPI(helper.URLMustParse(ts.URL), "123", http.DefaultTransport)
	resp, err := api.PreAuthorizeFixedPath(req, "POST", "/my/api/path")
	require.Nil(t, resp)

	// require.ErrorAs only checks the error's type and OVERWRITES the target,
	// so pre-populating the struct fields asserted nothing. Extract the error
	// first, then check the fields explicitly.
	var preAuthError *PreAuthorizeFixedPathError
	require.ErrorAs(t, err, &preAuthError)
	require.Equal(t, http.StatusUnauthorized, preAuthError.StatusCode)
	require.Equal(t, "401 Unauthorized", preAuthError.Status)
}
func getGeoProxyDataGivenResponse(t *testing.T, givenInternalApiResponse string) (*GeoProxyData, error) {
t.Helper()
ts := testRailsServer(regexp.MustCompile(`/api/v4/geo/proxy`), 200, givenInternalApiResponse)
@ -77,35 +130,3 @@ func testRailsServer(url *regexp.Regexp, code int, body string) *httptest.Server
fmt.Fprint(w, body)
})
}
// TestPreAuthorizeFixedPath exercises the happy path of PreAuthorizeFixedPath:
// original headers and query parameters must reach the fixed internal API
// path, and a successful JSON reply must be decoded.
// NOTE(review): this is the same scenario as TestPreAuthorizeFixedPath_OK
// earlier in this file — it looks like a leftover copy from a rename and is a
// candidate for deletion in a follow-up.
func TestPreAuthorizeFixedPath(t *testing.T) {
	var (
		upstreamHeaders http.Header
		upstreamQuery   url.Values
	)

	// Fake internal API: records what it received and answers with a minimal
	// successful pre-authorization payload.
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path != "/my/api/path" {
			return
		}
		upstreamHeaders = r.Header
		upstreamQuery = r.URL.Query()
		w.Header().Set("Content-Type", ResponseContentType)
		io.WriteString(w, `{"TempPath":"HELLO!!"}`)
	}))
	defer ts.Close()

	req, err := http.NewRequest("GET", "/original/request/path?q1=Q1&q2=Q2", nil)
	require.NoError(t, err)
	req.Header.Set("key1", "value1")

	api := NewAPI(helper.URLMustParse(ts.URL), "123", http.DefaultTransport)
	resp, err := api.PreAuthorizeFixedPath(req, "POST", "/my/api/path")
	require.NoError(t, err)

	require.Equal(t, "value1", upstreamHeaders.Get("key1"), "original headers must propagate")
	require.Equal(t, url.Values{"q1": []string{"Q1"}, "q2": []string{"Q2"}}, upstreamQuery,
		"original query must propagate")
	require.Equal(t, "HELLO!!", resp.TempPath, "sanity check: successful API call")
}

View File

@ -69,13 +69,14 @@ func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Hand
return
}
var protocolErr textproto.ProtocolError
if errors.As(err, &protocolErr) {
switch t := err.(type) {
case textproto.ProtocolError:
fail.Request(w, r, err, fail.WithStatus(http.StatusBadRequest))
return
case *api.PreAuthorizeFixedPathError:
fail.Request(w, r, err, fail.WithStatus(t.StatusCode), fail.WithBody(t.Status))
default:
fail.Request(w, r, fmt.Errorf("handleFileUploads: extract files from multipart: %v", err))
}
fail.Request(w, r, fmt.Errorf("handleFileUploads: extract files from multipart: %v", err))
}
return
}

View File

@ -232,20 +232,7 @@ func TestUploadProcessingField(t *testing.T) {
func TestUploadingMultipleFiles(t *testing.T) {
testhelper.ConfigureSecret()
var buffer bytes.Buffer
writer := multipart.NewWriter(&buffer)
for i := 0; i < 11; i++ {
_, err := writer.CreateFormFile(fmt.Sprintf("file %v", i), "my.file")
require.NoError(t, err)
}
require.NoError(t, writer.Close())
httpRequest, err := http.NewRequest("PUT", "/url/path", &buffer)
require.NoError(t, err)
httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
response := httptest.NewRecorder()
httpRequest, response := setupMultipleFiles(t)
testInterceptMultipartFiles(t, response, httpRequest, nilHandler, &testFormProcessor{})
@ -357,6 +344,23 @@ func TestBadMultipartHeader(t *testing.T) {
require.Equal(t, 400, response.Code)
}
// TestUnauthorizedMultipartHeader verifies that when the authorizing API
// answers 401, interceptMultipartFiles relays the upstream status code and
// status text to the client (via the PreAuthorizeFixedPathError path) instead
// of a generic 500.
func TestUnauthorizedMultipartHeader(t *testing.T) {
	testhelper.ConfigureSecret()

	httpRequest, response := setupMultipleFiles(t)

	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusUnauthorized)
	}))
	defer ts.Close()

	// Name the local apiClient, not "api": the original shadowed the imported
	// api package, which blocks any later use of the package in this function.
	apiClient := api.NewAPI(helper.URLMustParse(ts.URL), "123", http.DefaultTransport)
	interceptMultipartFiles(response, httpRequest, nilHandler, &testFormProcessor{}, &apiAuthorizer{apiClient}, &DefaultPreparer{})

	require.Equal(t, 401, response.Code)
	require.Equal(t, "401 Unauthorized\n", response.Body.String())
}
func TestMalformedMimeHeader(t *testing.T) {
testhelper.ConfigureSecret()
@ -571,3 +575,22 @@ func testInterceptMultipartFiles(t *testing.T, w http.ResponseWriter, r *http.Re
interceptMultipartFiles(w, r, h, filter, fa, preparer)
}
// setupMultipleFiles builds a multipart "PUT /url/path" request containing 11
// form files plus a fresh response recorder, for tests that need a multi-file
// upload payload.
func setupMultipleFiles(t *testing.T) (*http.Request, *httptest.ResponseRecorder) {
	// t.Helper must run before anything can fail so that require failures
	// below are reported at the caller's line, not inside this helper.
	t.Helper()

	var buffer bytes.Buffer
	writer := multipart.NewWriter(&buffer)
	for i := 0; i < 11; i++ {
		_, err := writer.CreateFormFile(fmt.Sprintf("file %v", i), "my.file")
		require.NoError(t, err)
	}
	require.NoError(t, writer.Close())

	httpRequest, err := http.NewRequest("PUT", "/url/path", &buffer)
	require.NoError(t, err)
	httpRequest.Header.Set("Content-Type", writer.FormDataContentType())

	return httpRequest, httptest.NewRecorder()
}