Add latest changes from gitlab-org/gitlab@master
parent a33d103465
commit f3af1e76dc
@@ -1,56 +0,0 @@
<!--
## Implementation Issue To-Do list
(_NOTE: This section can be removed when the issue is ready for creation_)
- [ ] Ensure that issue title is concise yet descriptive
- [ ] Add `Frontend :` or `Backend: ` per group [naming conventions](https://about.gitlab.com/handbook/engineering/development/ops/verify/pipeline-authoring/#splitting-issues)
- [ ] Ensure the issue containing the feature or change proposal and related discussions is linked as related to this implementation issue.
- [ ] Aside from default labeling, please make sure to include relevant labels for `type::`, `workflow::`, and `~frontend`/`~backend` labeling.
- [ ] Issues with user-facing changes should include the `~UX` label.
-->

## Summary

## Proposal

## Confirm purpose and User Reception (how does this benefit the user?)

## Additional details
<!--
_NOTE: If the issue has addressed all of these questions, this separate section can be removed._
-->

Some relevant technical details, if applicable, such as:

- Does this need a ~"feature flag"?
- Does there need to be an associated ~"instrumentation" issue created related to this work?
- Is there an example response showing the data structure that should be returned (new endpoints only)?
- What permissions should be used?
- Is this EE or CE?
  - [ ] EE
  - [ ] CE
- Additional comments:

## Implementation Table

<!--
_NOTE: If the issue is not part of an epic, the implementation table can be removed. If it is part of an epic, make sure that the implementation table below mirrors the corresponding epic's implementation table content._
-->

| Group | Issue Link |
| ------ | ------ |
| ~backend | :point_left: You are here |
| ~frontend | [#123123](url) |

<!--
## Documentation

_NOTE: This section is optional, but can be used for easy access to any relevant documentation URLs._
-->

## Links/References

/label ~"group::pipeline authoring" ~"Category:Pipeline Composition" ~"section::ci" ~"devops::verify" ~"workflow::planning breakdown"
@@ -0,0 +1,38 @@
## Summary
<!--
required

Confirm the purpose and expected user reception, especially: How does this benefit the user?
-->

## Definition of Done
<!-- required -->
<!-- What is needed in order for this issue to be released? -->

<!-- Example:
- Users can drill down in the pipeline to view at a stage level
- The feature flag is rolled out
- Relevant documentation is added or updated
- Relevant test coverages of all testing requirements are added or updated
-->

## Details
<!--
Optional (when design needed):

#### Design spec
- [Figma dev 🔗](Figma board link)
- [Design tab 👉](Design tab link)
-->

## Roles involved
<!-- to be checked by ICs -->
- [ ] Design
- [ ] Technical Writing
- [ ] Backend
- [ ] Frontend
- [ ] Test engineering

**NOTE:** When work is ready to be defined, please create appropriate sub-tasks.

/label ~"group::pipeline authoring" ~"Category:Pipeline Composition" ~"section::ci" ~"devops::verify"
@@ -1 +1 @@
1a16fa05c2645a0abba4e2f028e1fdbe5d85be2f
dcb656ee580c8503bf25c06dd6ee117178dcb2a1
@@ -1463,9 +1463,6 @@ class MergeRequest < ApplicationRecord
def cache_merge_request_closes_issues!(current_user = self.author)
return if closed? || merged?

issue_ids_existing = merge_requests_closing_issues
.from_mr_description
.pluck(:issue_id)
issues_to_close_ids = closes_issues(current_user).reject { |issue| issue.is_a?(ExternalIssue) }.map(&:id)

transaction do
@@ -1481,29 +1478,23 @@ class MergeRequest < ApplicationRecord
end

issue_ids_to_create = issues_to_close_ids - issue_ids_to_update
next unless issue_ids_to_create.any?

if issue_ids_to_create.any?
now = Time.zone.now
new_associations = issue_ids_to_create.map do |issue_id|
MergeRequestsClosingIssues.new(
issue_id: issue_id,
merge_request_id: id,
from_mr_description: true,
created_at: now,
updated_at: now
)
end

# We can't skip validations here in bulk insert as we don't have a unique constraint on the DB.
# We can skip validations once we have validated the unique constraint
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/456965
MergeRequestsClosingIssues.bulk_insert!(new_associations, batch_size: 100)
now = Time.zone.now
new_associations = issue_ids_to_create.map do |issue_id|
MergeRequestsClosingIssues.new(
issue_id: issue_id,
merge_request_id: id,
from_mr_description: true,
created_at: now,
updated_at: now
)
end
end

ids_for_trigger = (issue_ids_existing + issues_to_close_ids).uniq
WorkItem.id_in(ids_for_trigger).find_each(batch_size: 100) do |work_item|
GraphqlTriggers.work_item_updated(work_item)
# We can't skip validations here in bulk insert as we don't have a unique constraint on the DB.
# We can skip validations once we have validated the unique constraint
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/456965
MergeRequestsClosingIssues.bulk_insert!(new_associations, batch_size: 100)
end
end
@@ -384,9 +384,14 @@ class IssuableBaseService < ::BaseContainerService
end
end

trigger_update_subscriptions(issuable, old_associations)

issuable
end

# Overriden in child class
def trigger_update_subscriptions(issuable, old_associations); end

def transaction_update(issuable, opts = {})
touch = opts[:save_with_touch] || false
@@ -69,15 +69,51 @@ module MergeRequests
MergeRequests::CloseService
end

def after_update(issuable, old_associations)
def after_update(merge_request, old_associations)
super
issuable.cache_merge_request_closes_issues!(current_user)

merge_request.cache_merge_request_closes_issues!(current_user)
@trigger_work_item_updated = true
end

private

attr_reader :target_branch_was_deleted

def trigger_updated_work_item_on_closing_issues(merge_request, old_closing_issues_ids)
new_issue_ids = merge_request.merge_requests_closing_issues.limit(1000).pluck(:issue_id) # rubocop:disable CodeReuse/ActiveRecord -- Implementation would be the same in the model
all_issue_ids = new_issue_ids | old_closing_issues_ids
return if all_issue_ids.blank?

WorkItem.id_in(all_issue_ids).find_each(batch_size: 100) do |work_item| # rubocop:disable CodeReuse/ActiveRecord -- Implementation would be the same in the model
GraphqlTriggers.work_item_updated(work_item)
end
end

override :associations_before_update
def associations_before_update(merge_request)
super.merge(
closing_issues_ids: merge_request.merge_requests_closing_issues.limit(1000).pluck(:issue_id) # rubocop:disable CodeReuse/ActiveRecord -- Implementation would be the same in the model
)
end

override :change_state
def change_state(merge_request)
return unless super

@trigger_work_item_updated = true
end

override :trigger_update_subscriptions
def trigger_update_subscriptions(merge_request, old_associations)
return unless @trigger_work_item_updated

trigger_updated_work_item_on_closing_issues(
merge_request,
old_associations.fetch(:closing_issues_ids, [])
)
end

def general_fallback(merge_request)
# We don't allow change of source/target projects and source branch
# after merge request was created
@@ -930,6 +930,9 @@ Gitlab.ee do
Settings.cron_jobs['gitlab_subscriptions_add_on_purchases_cleanup_worker'] ||= {}
Settings.cron_jobs['gitlab_subscriptions_add_on_purchases_cleanup_worker']['cron'] ||= '0 1 * * *'
Settings.cron_jobs['gitlab_subscriptions_add_on_purchases_cleanup_worker']['job_class'] = 'GitlabSubscriptions::AddOnPurchases::CleanupWorker'
Settings.cron_jobs['observability_alert_query_worker'] ||= {}
Settings.cron_jobs['observability_alert_query_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['observability_alert_query_worker']['job_class'] = 'Observability::AlertQueryWorker'

Gitlab.com do
Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects'] ||= {}
@@ -1022,6 +1025,23 @@ Gitlab.ee do
Settings.cloud_connector['base_url'] ||= ENV['CLOUD_CONNECTOR_BASE_URL'] || 'https://cloud.gitlab.com'
end

#
# Duo Workflow
#
Gitlab.ee do
Settings['duo_workflow'] ||= {}
Settings.duo_workflow.reverse_merge!(
secure: true
)

# Default to proxy via Cloud Connector
unless Settings.duo_workflow['service_url'].present?
cloud_connector_uri = URI.parse(Settings.cloud_connector.base_url)
Settings.duo_workflow['service_url'] = "#{cloud_connector_uri.host}:#{cloud_connector_uri.port}"
Settings.duo_workflow['secure'] = cloud_connector_uri.scheme == 'https'
end
end

#
# Zoekt credentials
#
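For context, the fallback above derives the Duo Workflow service address from the Cloud Connector base URL. A minimal standalone sketch of that derivation, assuming the default base URL shown in the hunk (a configured `duo_workflow.service_url` would skip this fallback; local variable names are illustrative):

```ruby
require 'uri'

# Assumed default Cloud Connector base URL from the hunk above.
base_url = 'https://cloud.gitlab.com'

uri = URI.parse(base_url)
service_url = "#{uri.host}:#{uri.port}" # => "cloud.gitlab.com:443" (URI fills in the default port)
secure = uri.scheme == 'https'          # => true
```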
@@ -8,7 +8,6 @@ description: Store build-related runner session. Data is removed after the respe
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/6208
milestone: '11.1'
gitlab_schema: gitlab_ci
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/459994
desired_sharding_key:
  project_id:
    references: projects

@@ -8,7 +8,6 @@ description: CI/CD variables set to a job when running it manually.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/14784
milestone: '12.2'
gitlab_schema: gitlab_ci
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463243
desired_sharding_key:
  project_id:
    references: projects

@@ -9,7 +9,6 @@ description: Stores user provided annotations for jobs. Currently storing extra
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/117319
milestone: '16.1'
gitlab_schema: gitlab_ci
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463246
desired_sharding_key:
  project_id:
    references: projects

@@ -8,7 +8,6 @@ description: Routing table for ci_pipeline_variables
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141270
milestone: '16.9'
gitlab_schema: gitlab_ci
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463244
desired_sharding_key:
  project_id:
    references: projects
@@ -1,6 +1,6 @@
---
stage: SaaS Platforms
group: GitLab Dedicated
stage: Verify
group: Hosted Runners
description: Use hosted runners to run your CI/CD jobs on GitLab Dedicated.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
@@ -54,6 +54,8 @@ tutorial for a version of this feature. It's available to all
subscription levels, supports writing secrets to and deleting secrets from Vault,
and supports multiple secrets engines.

You must replace the `vault.example.com` URL below with the URL of your Vault server, and `gitlab.example.com` with the URL of your GitLab instance.

## Vault Secrets Engines

> - `generic` option [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/366492) in GitLab Runner 16.11.
@@ -108,9 +110,13 @@ To configure your Vault server:
If no role is specified, Vault uses the [default role](https://developer.hashicorp.com/vault/api-docs/auth/jwt#default_role)
specified when the authentication method was configured.
- `VAULT_AUTH_PATH` - Optional. The path where the authentication method is mounted, default is `jwt`.
- `VAULT_NAMESPACE` - Optional. The [Vault Enterprise namespace](https://developer.hashicorp.com/vault/docs/enterprise/namespaces) to use for reading secrets and authentication.
  If no namespace is specified, Vault uses the `root` ("`/`") namespace.
  The setting is ignored by Vault Open Source.
- `VAULT_NAMESPACE` - Optional. The [Vault Enterprise namespace](https://developer.hashicorp.com/vault/docs/enterprise/namespaces)
  to use for reading secrets and authentication. With:
  - Vault, the `root` ("`/`") namespace is used when no namespace is specified.
  - Vault Open source, the setting is ignored.
  - [HashiCorp Cloud Platform (HCP)](https://www.hashicorp.com/cloud) Vault, a namespace
    is required. HCP Vault uses the `admin` namespace as the root namespace by default.
    For example, `VAULT_NAMESPACE=admin`.

NOTE:
Support for providing these values in the user interface [is tracked in this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/218677).
@@ -128,7 +134,7 @@ the secrets stored in Vault by defining them with the [`vault` keyword](../yaml/
job_using_vault:
id_tokens:
VAULT_ID_TOKEN:
aud: https://gitlab.com
aud: https://vault.example.com
secrets:
DATABASE_PASSWORD:
vault: production/db/password@ops # translates to secret `ops/data/production/db`, field `password`

@@ -151,7 +157,7 @@ To overwrite the default behavior, set the `file` option explicitly:
secrets:
id_tokens:
VAULT_ID_TOKEN:
aud: https://gitlab.com
aud: https://vault.example.com
DATABASE_PASSWORD:
vault: production/db/password@ops
file: false

@@ -172,7 +178,7 @@ For example, to set the secret engine and path for Artifactory:
job_using_vault:
id_tokens:
VAULT_ID_TOKEN:
aud: https://gitlab.com
aud: https://vault.example.com
secrets:
JFROG_TOKEN:
vault:

@@ -212,6 +218,7 @@ $ vault write auth/jwt/role/myproject-production - <<EOF
"policies": ["myproject-production"],
"token_explicit_max_ttl": 60,
"user_claim": "user_email",
"bound_audiences": "https://vault.example.com",
"bound_claims_type": "glob",
"bound_claims": {
"project_id": "42",
@@ -57,20 +57,54 @@ To fix this issue, here are some possible solutions.

### Increase the POST buffer size in Git

**If you're using Git over HTTP instead of SSH**, you can try increasing the POST buffer size in Git
configuration.

Example of an error during a clone:
`fatal: pack has bad object at offset XXXXXXXXX: inflate returned -5`

Open a terminal and enter:
When you attempt to push large repositories with Git over HTTPS, you might get an error message like:

```shell
git config http.postBuffer 52428800
fatal: pack has bad object at offset XXXXXXXXX: inflate returned -5
```

The value is specified in bytes, so in the above case the buffer size has been
set to 50 MB. The default is 1 MB.
To resolve this issue:

- Increase the
[http.postBuffer](https://git-scm.com/docs/git-config#Documentation/git-config.txt-httppostBuffer)
value in your local Git configuration. The default value is 1 MB. For example, if `git clone`
fails when cloning a 500 MB repository, execute the following:

1. Open a terminal or command prompt.
1. Increase the `http.postBuffer` value:

```shell
# Set the http.postBuffer size in bytes
git config http.postBuffer 524288000
```

If the local configuration doesn't resolve the issue, you may need to modify the server configuration.
This should be done cautiously and only if you have server access.

- Increase the `http.postBuffer` on the server side:

1. Open a terminal or command prompt.
1. Modify the GitLab instance's
[`gitlab.rb`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/13.5.1+ee.0/files/gitlab-config-template/gitlab.rb.template#L1435-1455) file:

```ruby
gitaly['configuration'] = {
# ...
git: {
# ...
config: [
# Set the http.postBuffer size, in bytes
{key: "http.postBuffer", value: "524288000"},
],
},
}
```

1. Apply the configuration change:

```shell
sudo gitlab-ctl reconfigure
```

### Stream 0 was not closed cleanly
@@ -261,62 +295,17 @@ This problem is common in Git itself, due to its inability to handle large files
- The number of revisions in the history.
- The existence of large files in the repository.

The root causes vary, so multiple potential solutions exist, and you may need to
apply more than one:
If this error occurs when cloning a large repository, you can
[decrease the cloning depth](../../user/project/repository/monorepos/index.md#shallow-cloning) to a value of `1`. For example:

- If this error occurs when cloning a large repository, you can
[decrease the cloning depth](../../user/project/repository/monorepos/index.md#shallow-cloning)
to a value of `1`. For example:
This approach doesn't resolve the underlying cause, but you can successfully clone the repository.
To decrease the cloning depth to `1`, run:

```shell
variables:
GIT_DEPTH: 1
```

- You can increase the
[http.postBuffer](https://git-scm.com/docs/git-config#Documentation/git-config.txt-httppostBuffer)
value in your local Git configuration from the default 1 MB value to a value greater
than the repository size. For example, if `git clone` fails when cloning a 500 MB
repository, you should set `http.postBuffer` to `524288000`:

```shell
# Set the http.postBuffer size, in bytes
git config http.postBuffer 524288000
```

- You can increase the `http.postBuffer` on the server side:

1. Modify the GitLab instance's
[`gitlab.rb`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/13.5.1+ee.0/files/gitlab-config-template/gitlab.rb.template#L1435-1455) file:

```ruby
gitaly['configuration'] = {
# ...
git: {
# ...
config: [
# Set the http.postBuffer size, in bytes
{key: "http.postBuffer", value: "524288000"},
],
},
}
```

1. After applying this change, apply the configuration change:

```shell
sudo gitlab-ctl reconfigure
```

For example, if a repository has a very long history and no large files, changing
the depth should fix the problem. However, if a repository has very large files,
even a depth of 1 may be too large, thus requiring the `postBuffer` change.
If you increase your local `postBuffer` but the NGINX value on the backend is still
too small, the error persists.

Modifying the server is not always an option, and introduces more potential risk.
Attempt local changes first.

## Password expired error on Git fetch with SSH for LDAP user

If `git fetch` returns this `HTTP 403 Forbidden` error on a self-managed instance of
@@ -47,7 +47,7 @@ DETAILS:
**Offering:** GitLab.com, Self-managed, GitLab Dedicated

- Helps you write code more efficiently by generating code and showing suggestions as you type.
- Large language model (LLM) for code completion: Vertex AI Codey [`code-gecko`](https://console.cloud.google.com/vertex-ai/publishers/google/model-garden/code-gecko)
- LLM for code completion: Vertex AI Codey [`code-gecko`](https://console.cloud.google.com/vertex-ai/publishers/google/model-garden/code-gecko)
- LLM for code generation: Anthropic [Claude 3.5 Sonnet](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-5-sonnet)
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Watch overview](https://youtu.be/ds7SG1wgcVM)
- [View documentation](../project/repository/code_suggestions/index.md).

@@ -93,7 +93,7 @@ DETAILS:
**Offering:** GitLab.com, Self-managed, GitLab Dedicated

- Helps you merge more quickly by generating meaningful commit messages.
- LLM: Anthropic [Claude 3.5 Sonnet](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-5-sonnet).
- LLM: Anthropic [Claude 3.5 Sonnet](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-5-sonnet)
- [View documentation](../project/merge_requests/duo_in_merge_requests.md#generate-a-merge-commit-message).

### Root cause analysis

@@ -117,7 +117,7 @@ DETAILS:
**Offering:** GitLab.com, Self-managed, GitLab Dedicated

- Helps you understand vulnerabilities, how they can be exploited, and how to fix them.
- LLM: Anthropic [Claude 3 Haiku](https://docs.anthropic.com/en/docs/about-claude/models#claude-3-a-new-generation-of-ai).
- LLM: Anthropic [Claude 3 Haiku](https://docs.anthropic.com/en/docs/about-claude/models#claude-3-a-new-generation-of-ai)
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Watch overview](https://www.youtube.com/watch?v=MMVFvGrmMzw&list=PLFGfElNsQthZGazU1ZdfDpegu0HflunXW)
- [View documentation](../application_security/vulnerabilities/index.md#explaining-a-vulnerability).

@@ -155,7 +155,7 @@ DETAILS:
**Status:** Beta

- Help resolve a vulnerability by generating a merge request that addresses it.
- LLM: Anthropic's [`claude-3.5-sonnet`](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-5-sonnet).
- LLM: Anthropic [Claude 3.5 Sonnet](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-5-sonnet)
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Watch overview](https://www.youtube.com/watch?v=VJmsw_C125E&list=PLFGfElNsQthZGazU1ZdfDpegu0HflunXW)
- [View documentation](../application_security/vulnerabilities/index.md#vulnerability-resolution).
@@ -9,6 +9,7 @@ module Gitlab
EXCLUDED_QUERY_PARAM_NAMES = %w[
search
search_title
search_query
term
name
filter

@@ -17,7 +18,9 @@ module Gitlab
body
commit_message
content
description
].freeze
NESTED_PARAMETERS_MAX_LEVEL = 5

def initialize(app)
@app = app

@@ -44,6 +47,7 @@ module Gitlab
private

def check(request, log_params)
original_fullpath = request.fullpath
exclude_query_parameters(request)

decoded_fullpath = CGI.unescape(request.fullpath)

@@ -51,7 +55,7 @@ module Gitlab
return unless Gitlab::PathTraversal.path_traversal?(decoded_fullpath, match_new_line: false)

log_params[:method] = request.request_method
log_params[:fullpath] = request.fullpath
log_params[:fullpath] = original_fullpath
log_params[:message] = PATH_TRAVERSAL_MESSAGE
end

@@ -59,10 +63,18 @@ module Gitlab
query_params = request.GET
return if query_params.empty?

query_params.except!(*EXCLUDED_QUERY_PARAM_NAMES)
cleanup_query_parameters!(query_params)

request.set_header(Rack::QUERY_STRING, Rack::Utils.build_nested_query(query_params))
end

def cleanup_query_parameters!(params, level: 1)
return params if params.empty? || level > NESTED_PARAMETERS_MAX_LEVEL

params.except!(*EXCLUDED_QUERY_PARAM_NAMES)
params.each { |k, v| params[k] = cleanup_query_parameters!(v, level: level + 1) if v.is_a?(Hash) }
end

def log(payload)
Gitlab::AppLogger.warn(
payload.merge(class_name: self.class.name)
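The new `cleanup_query_parameters!` above strips excluded parameter names recursively, up to `NESTED_PARAMETERS_MAX_LEVEL` levels deep, before the path-traversal check runs. A minimal standalone sketch of that behavior, using plain-Ruby `reject!` in place of ActiveSupport's `Hash#except!` (the excluded names and depth limit are copied from the hunks above; the method and variable names are illustrative only):

```ruby
EXCLUDED_QUERY_PARAM_NAMES = %w[
  search search_title search_query term name filter
  body commit_message content description
].freeze
NESTED_PARAMETERS_MAX_LEVEL = 5

def cleanup_query_parameters(params, level: 1)
  return params if params.empty? || level > NESTED_PARAMETERS_MAX_LEVEL

  params.reject! { |key, _| EXCLUDED_QUERY_PARAM_NAMES.include?(key) } # stand-in for Hash#except!
  params.each { |key, value| params[key] = cleanup_query_parameters(value, level: level + 1) if value.is_a?(Hash) }
end

query = { 'level_1' => { 'search' => 'an/../attempt' }, 'x' => 'test' }
cleanup_query_parameters(query)
# => { 'level_1' => {}, 'x' => 'test' } -- the noisy value never reaches the path-traversal check
```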
@@ -14,6 +14,10 @@ module Gitlab
'https://observe.gitlab.com'
end

def alerts_url
"#{Gitlab::Observability.observability_url}/observability/v1/alerts"
end

def should_enable_observability_auth_scopes?(resource)
# Enable the needed oauth scopes if tracing is enabled.
if resource.is_a?(Group) || resource.is_a?(Project)
@@ -93,6 +93,10 @@ module GitlabSettings
@options = to_hash.merge(other.deep_stringify_keys)
end

def reverse_merge!(other)
@options = to_hash.reverse_merge(other.deep_stringify_keys)
end

def deep_merge(other)
self.class.build(to_hash.deep_merge(other.deep_stringify_keys))
end
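The new `reverse_merge!` above fills in defaults without overwriting options that are already set, mirroring the spec added later in this commit. A minimal plain-Hash sketch of that semantic, assuming `a.reverse_merge(b)` behaves like `b.merge(a)` (the literal values come from that spec; the rest is illustrative):

```ruby
options = { 'foo' => { 'bar' => 'baz' } }

# reverse_merge with a new key: the default is added.
{ 'more' => 'configs' }.merge(options)
# => { 'more' => 'configs', 'foo' => { 'bar' => 'baz' } }

# reverse_merge with a conflicting key: the existing option wins.
{ 'foo' => 'configs' }.merge(options)
# => { 'foo' => { 'bar' => 'baz' } }
```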
@@ -40314,9 +40314,6 @@ msgstr ""
msgid "Please select a Jira project"
msgstr ""

msgid "Please select a group"
msgstr ""

msgid "Please select a project."
msgstr ""
@@ -72,6 +72,8 @@ module QA
# Disable /dev/shm use in CI. See https://gitlab.com/gitlab-org/gitlab/issues/4252
chrome_options[:args] << 'disable-dev-shm-usage' if QA::Runtime::Env.disable_dev_shm?

chrome_options[:args] << 'disable-search-engine-choice-screen'

# Allows chrome to consider all actions as secure when no ssl is used
Runtime::Scenario.attributes[:gitlab_address].tap do |address|
next unless address.start_with?('http://')
@@ -26,6 +26,7 @@ describe('RelatedIssuesBlock', () => {
const findAddForm = () => wrapper.findByTestId('crud-form');
const findAllRelatedIssuesList = () => wrapper.findAllComponents(RelatedIssuesList);
const findRelatedIssuesList = (index) => findAllRelatedIssuesList().at(index);
const findCrudComponent = () => wrapper.findComponent(CrudComponent);

const createComponent = ({
pathIdSeparator = PathIdSeparator.Issue,

@@ -38,6 +39,8 @@ describe('RelatedIssuesBlock', () => {
showCategorizedIssues = false,
autoCompleteEpics = true,
slots = '',
headerText = '',
addButtonText = '',
} = {}) => {
wrapper = shallowMountExtended(RelatedIssuesBlock, {
propsData: {

@@ -50,6 +53,8 @@ describe('RelatedIssuesBlock', () => {
relatedIssues,
showCategorizedIssues,
autoCompleteEpics,
headerText,
addButtonText,
},
provide: {
reportAbusePath: '/report/abuse/path',

@@ -288,4 +293,32 @@ describe('RelatedIssuesBlock', () => {
},
);
});

describe('headerText prop', () => {
it('renders the title with headerText when set', () => {
createComponent({ headerText: 'foo bar' });

expect(wrapper.findByTestId('crud-title').text()).toContain('foo bar');
});

it('renders the issuable type title when headerText is empty', () => {
createComponent({ headerText: '' });

expect(wrapper.findByTestId('crud-title').text()).toContain('Linked items');
});
});

describe('canAdmin=true and addButtonText prop', () => {
it('sets the button text to addButtonText when set', () => {
createComponent({ canAdmin: true, addButtonText: 'do foo' });

expect(findCrudComponent().props('toggleText')).toBe('do foo');
});

it('uses the default button text when addButtonText is empty', () => {
createComponent({ canAdmin: true, addButtonText: '' });

expect(findCrudComponent().props('toggleText')).toBe('Add');
});
});
});
@@ -309,9 +309,10 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
issue_url_regex = %r{\Ahttps://gitlab\.com/gitlab-org/gitlab/-/issues/\d+\z}

entries_with_issue_link.each do |entry|
if entry.sharding_key.present?
if entry.sharding_key.present? || entry.desired_sharding_key.present?
expect(entry.sharding_key_issue_url).not_to be_present,
"You must remove `sharding_key_issue_url` from #{entry.table_name} now that it has a valid sharding key." \
"You must remove `sharding_key_issue_url` from #{entry.table_name} now that it " \
"has a valid sharding key/desired sharding key."
else
expect(entry.sharding_key_issue_url).to match(issue_url_regex),
"Invalid `sharding_key_issue_url` url for #{entry.table_name}. Please use the following format: " \
@@ -24,7 +24,7 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
end

let(:env) do
path_with_query_params = [path, query_params.to_query.presence].compact.join('?')
path_with_query_params = [path, querify(query_params).presence].compact.join('?')
Rack::MockRequest.env_for(path_with_query_params, method: method)
end

@@ -41,15 +41,6 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
end
end

shared_examples 'excluded path' do
it 'does not log anything' do
expect(::Gitlab::PathTraversal).not_to receive(:path_traversal?)
expect(::Gitlab::AppLogger).not_to receive(:warn)

expect(subject).to eq(fake_response)
end
end

shared_examples 'path traversal' do
it 'logs the problem' do
expect(::Gitlab::PathTraversal)

@@ -84,7 +75,7 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
'/foo/bar' | { x: 'foo' } | 'no issue'
'/foo/bar' | { x: 'foo/../bar' } | 'path traversal'
'/foo/bar' | { x: 'foo%2Fbar' } | 'no issue'
'/foo/bar' | { x: 'foo%2F..%2Fbar' } | 'no issue'
'/foo/bar' | { x: 'foo%2F..%2Fbar' } | 'path traversal'
'/foo/bar' | { x: 'foo%252F..%252Fbar' } | 'no issue'
'/foo%2F..%2Fbar' | { x: 'foo%252F..%252Fbar' } | 'path traversal'

@@ -98,11 +89,40 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
described_class::EXCLUDED_QUERY_PARAM_NAMES.each do |param_name|
context "with the excluded query parameter #{param_name}" do
let(:path) { '/foo/bar' }
let(:query_params) { { param_name => 'an%2F..%2Fattempt', :x => 'test' } }
let(:query_params) { { param_name => 'an/../attempt', :x => 'test' } }
let(:decoded_fullpath) { '/foo/bar?x=test' }

it_behaves_like 'no issue'
end

context "with the excluded query parameter #{param_name} nested one level" do
let(:path) { '/foo/bar' }
let(:query_params) { { "level_1[#{param_name}]" => 'an/../attempt', :x => 'test' } }
let(:decoded_fullpath) { '/foo/bar?x=test' }

it_behaves_like 'no issue'
end

context "with the excluded query parameter #{param_name} nested two levels" do
let(:path) { '/foo/bar' }
let(:query_params) { { "level_1[level_2][#{param_name}]" => 'an/../attempt', :x => 'test' } }
let(:decoded_fullpath) { '/foo/bar?x=test' }

it_behaves_like 'no issue'
end

context "with the excluded query parameter #{param_name} nested above the max level" do
let(:path) { '/foo/bar' }

let(:query_params) do
{
"level_1[level_2][level_3][level_4][level_5][level_6][#{param_name}]" => 'an/../attempt',
:x => 'test'
}
end

it_behaves_like 'path traversal'
end
end
end
end

@@ -145,5 +165,10 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
end
end
end

# Can't use params.to_query as #to_query will encode values
def querify(params)
params.map { |k, v| "#{k}=#{v}" }.join('&')
end
end
end
@@ -168,6 +168,25 @@ RSpec.describe GitlabSettings::Options, :aggregate_failures, feature_category: :
end
end

describe '#reverse_merge!' do
it 'merges in place with the existing options' do
options.reverse_merge!(more: 'configs')

expect(options.to_hash).to eq(
'foo' => { 'bar' => 'baz' },
'more' => 'configs'
)
end

context 'when the merge hash replaces existing configs' do
it 'merges in place with the duplicated options not replaced' do
options.reverse_merge!(foo: 'configs')

expect(options.to_hash).to eq('foo' => { 'bar' => 'baz' })
end
end
end

describe '#deep_merge' do
it 'returns a new object with the options merged' do
expect(options.deep_merge(foo: { more: 'configs' }).to_hash).to eq('foo' => {
@@ -1326,55 +1326,6 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
)
end

context 'when merge request closing issues exist' do
let_it_be(:issue2) { create(:issue, project: project) }

before do
create(
:merge_requests_closing_issues,
issue: issue,
merge_request: subject,
from_mr_description: false
)
create(
:merge_requests_closing_issues,
issue: issue2,
merge_request: subject,
from_mr_description: true
)
end

context 'when new merge request closing issue records are created' do
it 'triggers a workItemUpdated subscription for all affected work items (added/removed/updated)' do
issue3 = create(:issue, project: project)

# issue is updated, issue 2 is removed, issue 3 is added
WorkItem.where(id: [issue, issue2, issue3]).find_each do |work_item|
expect(GraphqlTriggers).to receive(:work_item_updated).with(work_item).once.and_call_original
end

expect do
subject.update_columns(description: "Fixes #{issue.to_reference} Closes #{issue3.to_reference}")
subject.cache_merge_request_closes_issues!(subject.author)
end.to not_change { subject.merge_requests_closing_issues.count }.from(2)
end
end

context 'when new merge request closing issue records are not created' do
it 'triggers a workItemUpdated subscription for all affected work items (removed/updated)' do
# issue is updated, issue 2 is removed
WorkItem.where(id: [issue, issue2]).find_each do |work_item|
expect(GraphqlTriggers).to receive(:work_item_updated).with(work_item).once.and_call_original
end

expect do
subject.update_columns(description: "Fixes #{issue.to_reference}")
subject.cache_merge_request_closes_issues!(subject.author)
end.to change { subject.merge_requests_closing_issues.count }.from(2).to(1)
end
end
end

it 'does not cache closed issues when merge request is closed' do
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
@@ -1088,9 +1088,26 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
end
end

context 'while saving references to issues that the updated merge request closes' do
let(:first_issue) { create(:issue, project: project) }
let(:second_issue) { create(:issue, project: project) }
context 'while saving references to issues that the updated merge request closes', :aggregate_failures do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :private, :repository, group: group, developers: user) }
let_it_be(:merge_request, refind: true) { create(:merge_request, :simple, :unchanged, source_project: project) }
let_it_be(:first_issue) { create(:issue, project: project) }
let_it_be(:second_issue) { create(:issue, project: project) }

shared_examples 'merge request update that triggers work item updated subscription' do
it 'triggers a workItemUpdated subscription for all affected records' do
service = described_class.new(project: project, current_user: user, params: update_params)
allow(service).to receive(:execute_hooks)

WorkItem.where(id: issues_to_notify).find_each do |work_item|
expect(GraphqlTriggers).to receive(:work_item_updated).with(work_item).once.and_call_original
end

service.execute(merge_request)
end
end

it 'creates a `MergeRequestsClosingIssues` record marked as from_mr_description for each issue' do
issue_closing_opts = { description: "Closes #{first_issue.to_reference} and #{second_issue.to_reference}" }
@@ -1126,6 +1143,34 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
service.execute(merge_request.reload)
end.to change { MergeRequestsClosingIssues.count }.from(3).to(1)
end

it_behaves_like 'merge request update that triggers work item updated subscription' do
let(:update_params) { { description: "Closes #{first_issue.to_reference}" } }
let(:issues_to_notify) { [first_issue] }
end

context 'when MergeRequestsClosingIssues already exist' do
let_it_be(:third_issue) { create(:issue, project: project) }

before_all do
merge_request.update!(description: "Closes #{first_issue.to_reference} and #{second_issue.to_reference}")
merge_request.cache_merge_request_closes_issues!(user)
end

context 'when description updates MergeRequestsClosingIssues records' do
it_behaves_like 'merge request update that triggers work item updated subscription' do
let(:update_params) { { description: "Closes #{third_issue.to_reference} and #{second_issue.to_reference}" } }
let(:issues_to_notify) { [first_issue, second_issue, third_issue] }
end
end

context 'when description is not updated' do
it_behaves_like 'merge request update that triggers work item updated subscription' do
let(:update_params) { { state_event: 'close' } }
let(:issues_to_notify) { [first_issue, second_issue] }
end
end
end
end

context 'updating asssignee_ids' do