Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-04-09 00:11:46 +00:00
parent 1561d0c1f5
commit 49e0bb5312
36 changed files with 692 additions and 124 deletions

View File

@ -1 +1 @@
580fbc59e1e9edbdec2dc213f890affe3125373f
96892bcaa0e6b6ff3727d5b312cab22e8dcb9d59

View File

@ -65,6 +65,14 @@ export const TOKENS = [
{ value: 'two_factor_disabled', title: __('Off') },
],
},
{
title: __('Type'),
type: 'type',
token: GlFilteredSearchToken,
operators: OPERATORS_IS,
unique: true,
options: [{ value: 'placeholder', title: s__('UserMapping|Placeholder') }],
},
];
export const SOLO_OWNED_ORGANIZATIONS_REQUESTED_COUNT = 10;

View File

@ -2,7 +2,6 @@
import { mapState } from 'pinia';
import DiffsFileTree from '~/diffs/components/diffs_file_tree.vue';
import { useDiffsList } from '~/rapid_diffs/stores/diffs_list';
import { DIFF_FILE_MOUNTED } from '~/rapid_diffs/dom_events';
import { useFileBrowser } from '~/diffs/stores/file_browser';
export default {
@ -14,12 +13,6 @@ export default {
...mapState(useDiffsList, ['loadedFiles']),
...mapState(useFileBrowser, ['fileBrowserVisible']),
},
created() {
document.addEventListener(DIFF_FILE_MOUNTED, this.addLoadedFile);
},
beforeDestroy() {
document.removeEventListener(DIFF_FILE_MOUNTED, this.addLoadedFile);
},
methods: {
clickFile(file) {
this.$emit('clickFile', file);

View File

@ -9,6 +9,8 @@ import { useDiffsView } from '~/rapid_diffs/stores/diffs_view';
import { initHiddenFilesWarning } from '~/rapid_diffs/app/init_hidden_files_warning';
import { createAlert } from '~/alert';
import { __ } from '~/locale';
import { fixWebComponentsStreamingOnSafari } from '~/rapid_diffs/app/safari_fix';
import { DIFF_FILE_MOUNTED } from '~/rapid_diffs/dom_events';
// This facade interface joins together all the bits and pieces of Rapid Diffs: DiffFile, Settings, File browser, etc.
// It's a unified entrypoint for Rapid Diffs and all external communications should happen through this interface.
@ -19,6 +21,10 @@ class RapidDiffsFacade {
init() {
this.#registerCustomElements();
fixWebComponentsStreamingOnSafari(
document.querySelector('[data-diffs-list]'),
this.DiffFileImplementation,
);
const { reloadStreamUrl, metadataEndpoint, diffFilesEndpoint } =
document.querySelector('[data-rapid-diffs]').dataset;
useDiffsView(pinia).metadataEndpoint = metadataEndpoint;
@ -40,6 +46,7 @@ class RapidDiffsFacade {
});
});
initViewSettings({ pinia, streamUrl: reloadStreamUrl });
document.addEventListener(DIFF_FILE_MOUNTED, useDiffsList(pinia).addLoadedFile);
}
// eslint-disable-next-line class-methods-use-this
@ -58,9 +65,9 @@ class RapidDiffsFacade {
}
#registerCustomElements() {
customElements.define('diff-file', this.DiffFileImplementation);
customElements.define('diff-file-mounted', DiffFileMounted);
customElements.define('streaming-error', StreamingError);
window.customElements.define('diff-file', this.DiffFileImplementation);
window.customElements.define('diff-file-mounted', DiffFileMounted);
window.customElements.define('streaming-error', StreamingError);
}
}

View File

@ -0,0 +1,28 @@
import { throttle } from 'lodash';
// Safari doesn't consider custom elements as Web Components when streaming ¯\_(ツ)_/¯
// Works around a Safari quirk where custom elements inserted by streamed HTML
// are not upgraded: <diff-file> nodes arrive as plain HTMLElements and their
// lifecycle callbacks never run. We watch the streamed container and manually
// "upgrade" each affected element ourselves.
//
// @param {Element} elementToObserve - container element the diff HTML streams into.
// @param {Function} DiffFileImplementation - the custom element class registered for <diff-file>.
export const fixWebComponentsStreamingOnSafari = (elementToObserve, DiffFileImplementation) => {
  // UA sniff: matches Safari while excluding Chrome/Android UAs, which also
  // contain the token "Safari".
  const isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
  if (!isSafari) return;
  const observer = new MutationObserver(
    // Streaming inserts nodes in rapid bursts; throttle to one sweep per 200ms,
    // with a trailing call so the final burst is never missed.
    throttle(
      () => {
        document.querySelectorAll('diff-file-mounted:not([mounted])').forEach((diffFileMounted) => {
          // Mark the sentinel element so later sweeps skip it.
          diffFileMounted.setAttribute('mounted', 'true');
          const diffFile = diffFileMounted.parentElement;
          // Already a properly upgraded custom element — nothing to fix.
          if (diffFile instanceof DiffFileImplementation) return;
          // Manual upgrade: graft the custom element prototype onto the plain
          // element, copy own properties from a fresh instance, then invoke the
          // component's mount hook explicitly.
          Object.setPrototypeOf(diffFile, DiffFileImplementation.prototype);
          Object.assign(diffFile, new DiffFileImplementation(diffFile));
          diffFile.mount();
        });
      },
      200,
      { trailing: true },
    ),
  );
  // NOTE(review): the observer is never disconnected — presumably fine because
  // it lives for the page lifetime; confirm there is no teardown path needed.
  observer.observe(elementToObserve, {
    attributes: false,
    childList: true,
    subtree: true,
  });
};

View File

@ -3,7 +3,6 @@ import { debounce } from 'lodash';
import { renderHtmlStreams } from '~/streaming/render_html_streams';
import { toPolyfillReadable } from '~/streaming/polyfills';
import { DiffFile } from '~/rapid_diffs/diff_file';
import { DIFF_FILE_MOUNTED } from '~/rapid_diffs/dom_events';
import { performanceMarkAndMeasure } from '~/performance/utils';
export const statuses = {
@ -49,13 +48,7 @@ export const useDiffsList = defineStore('diffsList', {
},
async renderDiffsStream(stream, container, signal) {
this.status = statuses.streaming;
const addLoadedFile = this.addLoadedFile.bind(this);
document.addEventListener(DIFF_FILE_MOUNTED, addLoadedFile);
try {
await renderHtmlStreams([stream], container, { signal });
} finally {
document.removeEventListener(DIFF_FILE_MOUNTED, addLoadedFile);
}
await renderHtmlStreams([stream], container, { signal });
this.status = statuses.idle;
},
streamRemainingDiffs(url) {

View File

@ -33,6 +33,8 @@ module Clusters
)
if migration.save
Clusters::Migration::InstallAgentWorker.perform_async(migration.id)
ServiceResponse.success
else
error_response(message: migration.errors.full_messages)

View File

@ -0,0 +1,153 @@
# frozen_string_literal: true
module Clusters
  module Migration
    # Installs the GitLab agent into a cluster being migrated away from
    # certificate-based integration. It creates a cluster-admin service
    # account and launches a one-off pod that runs `helm upgrade --install`
    # for the gitlab/gitlab-agent chart.
    #
    # The outcome (:success or :error) is persisted on the migration record;
    # errors are swallowed and recorded rather than raised to the caller.
    class InstallAgentService
      # Kubernetes namespace names are limited to 63 characters.
      NAMESPACE_LENGTH_LIMIT = 63

      delegate :cluster, :agent, to: :migration, private: true
      delegate :kubeclient, to: :cluster, private: true

      # @param migration [Clusters::AgentMigration] the migration being processed
      def initialize(migration)
        @migration = migration
      end

      # Performs the installation. No-ops unless the agent install is still
      # pending and the cluster reports a :connected status.
      def execute
        return unless can_install_agent?

        kubeclient.create_or_update_service_account(service_account_resource)
        kubeclient.create_or_update_cluster_role_binding(cluster_role_binding_resource)
        kubeclient.create_pod(helm_install_pod_resource)

        update_status!(:success)
      rescue StandardError => e
        # NOTE(review): only the exception class is stored, not its message —
        # presumably to avoid persisting cluster-specific details; confirm.
        update_status!(:error, message: e.class)
      end

      private

      attr_reader :migration

      def can_install_agent?
        migration.agent_install_status_pending? && cluster.connection_status == :connected
      end

      def service_account_name
        'install-gitlab-agent'
      end

      def service_account_namespace
        'default'
      end

      def service_account_resource
        Gitlab::Kubernetes::ServiceAccount.new(service_account_name, service_account_namespace).generate
      end

      # Binds the install service account to the built-in cluster-admin role so
      # helm can create resources in any namespace.
      def cluster_role_binding_resource
        subjects = [{ kind: 'ServiceAccount', name: service_account_name, namespace: service_account_namespace }]

        Gitlab::Kubernetes::ClusterRoleBinding.new(service_account_name, 'cluster-admin', subjects).generate
      end

      def helm_install_pod_resource
        ::Kubeclient::Resource.new(metadata: helm_install_pod_metadata, spec: helm_install_pod_spec)
      end

      def helm_install_pod_metadata
        {
          name: service_account_name,
          namespace: service_account_namespace
        }
      end

      # One-shot helm pod: the install script is passed via the INSTALL_COMMAND
      # env var and expanded by the shell (`sh -c $(INSTALL_COMMAND)`);
      # restartPolicy 'Never' prevents failed installs from restart-looping.
      def helm_install_pod_spec
        {
          containers: [{
            name: 'helm',
            image: helm_install_image,
            env: [{
              name: 'INSTALL_COMMAND', value: install_command
            }],
            command: %w[/bin/sh],
            args: %w[-c $(INSTALL_COMMAND)]
          }],
          serviceAccountName: service_account_name,
          restartPolicy: 'Never'
        }
      end

      def add_repository_command
        'helm repo add gitlab https://charts.gitlab.io'
      end

      def update_repository_command
        'helm repo update'
      end

      # Full shell script run by the pod: register the chart repo, refresh it,
      # then install/upgrade the agent.
      def install_command
        [
          add_repository_command,
          update_repository_command,
          helm_install_command
        ].compact.join("\n")
      end

      # Assembled with #shelljoin so the agent name and flag values are
      # shell-escaped.
      def helm_install_command
        [
          'helm',
          'upgrade',
          '--install',
          agent.name,
          'gitlab/gitlab-agent',
          *namespace_flag,
          '--create-namespace',
          *image_tag_flag,
          *token_flag,
          *kas_address_flag
        ].shelljoin
      end

      def namespace_flag
        ['--namespace', agent_namespace]
      end

      # Pins the agent image tag to the installed KAS version, except on
      # GitLab.com which tracks the chart's latest tag.
      def image_tag_flag
        return if Gitlab.com? # rubocop:todo Gitlab/AvoidGitlabInstanceChecks -- GitLab.com uses the latest version, this check will be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/535030

        ['--set', "image.tag=v#{Gitlab::Kas.install_version_info}"]
      end

      def token_flag
        ['--set', "config.token=#{agent_token}"]
      end

      def kas_address_flag
        ['--set', "config.kasAddress=#{kas_address}"]
      end

      # Namespace derived from the agent name, truncated to the Kubernetes
      # 63-character limit before being parameterized.
      def agent_namespace
        "gitlab-agent-#{agent.name}".first(NAMESPACE_LENGTH_LIMIT).parameterize
      end

      def helm_install_image
        'registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image:helm-3.17.2-kube-1.32.3-alpine-3.21.3'
      end

      def agent_token
        agent.agent_tokens.first.token
      end

      def kas_address
        Gitlab::Kas.external_url
      end

      # Persists the install outcome (and optional message) on the migration.
      def update_status!(status, message: nil)
        migration.update!(
          agent_install_status: status,
          agent_install_message: message
        )
      end
    end
  end
end

View File

@ -41,24 +41,11 @@ module ServicePing
{
metadata: {
uuid: service_ping_payload[:uuid],
metrics: metrics_collection_metadata(service_ping_payload)
metrics: Gitlab::Utils::UsageData.metrics_collection_metadata(service_ping_payload)
}
}
end
def metrics_collection_metadata(payload, parents = [])
return [] unless payload.is_a?(Hash)
payload.flat_map do |key, metric_value|
key_path = parents.dup.append(key)
if metric_value.respond_to?(:duration)
{ name: key_path.join('.'), time_elapsed: metric_value.duration, error: metric_value.error }.compact
else
metrics_collection_metadata(metric_value, key_path)
end
end
end
def submit_payload(payload, path: USAGE_DATA_PATH)
Gitlab::HTTP.post(
URI.join(base_url, path),

View File

@ -1449,6 +1449,16 @@
:idempotent: false
:tags: []
:queue_namespace: :gcp_cluster
- :name: gcp_cluster:clusters_migration_install_agent
:worker_name: Clusters::Migration::InstallAgentWorker
:feature_category: :deployment_management
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
:queue_namespace: :gcp_cluster
- :name: gcp_cluster:wait_for_cluster_creation
:worker_name: WaitForClusterCreationWorker
:feature_category: :deployment_management

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Clusters
  module Migration
    # Background job that triggers agent installation for a cluster migration.
    # Safe to enqueue repeatedly: executions are deduplicated and idempotent.
    class InstallAgentWorker
      include ApplicationWorker
      include ClusterQueue

      deduplicate :until_executed, including_scheduled: true
      idempotent!
      urgency :low
      data_consistency :delayed

      # @param migration_id [Integer] id of the Clusters::AgentMigration to process
      def perform(migration_id)
        record = Clusters::AgentMigration.find_by_id(migration_id)

        # The migration may have been deleted between enqueue and execution.
        return if record.nil?

        Clusters::Migration::InstallAgentService.new(record).execute
      end
    end
  end
end

View File

@ -75,6 +75,7 @@ class GitlabServicePingWorker # rubocop:disable Scalability/IdempotentWorker
record = {
recorded_at: payload[:recorded_at],
payload: payload,
metadata: Gitlab::Utils::UsageData.metrics_collection_metadata(payload),
created_at: Time.current,
updated_at: Time.current,
organization_id: Organizations::Organization.first.id

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds a jsonb `metadata` column to non_sql_service_pings, used to store
# per-metric collection metadata (metric name, time_elapsed, error) alongside
# the existing payload column.
class AddMetadataFieldToNonSqlServicePing < Gitlab::Database::Migration[2.2]
  milestone '17.11'

  # Reversible: `change` lets Rails derive the inverse (remove_column).
  def change
    add_column(:non_sql_service_pings, :metadata, :jsonb)
  end
end

View File

@ -0,0 +1 @@
40c4523bd34cd22d51da87e9a4efdfb2e0b07cc10ce45075f0b36d788e3950b9

View File

@ -18121,7 +18121,8 @@ CREATE TABLE non_sql_service_pings (
updated_at timestamp with time zone NOT NULL,
recorded_at timestamp with time zone NOT NULL,
payload jsonb NOT NULL,
organization_id bigint NOT NULL
organization_id bigint NOT NULL,
metadata jsonb
);
CREATE SEQUENCE non_sql_service_pings_id_seq

View File

@ -195,7 +195,7 @@ Gitaly and GitLab use two shared secrets for authentication:
gitlab_shell['secret_token'] = 'shellsecret'
```
On all nodes running Gitaly, edit `/etc/gitlab/gitlab.rb`:
On all nodes running Gitaly, edit `/etc/gitlab/gitlab.rb`:
```ruby
gitaly['gitlab_secret'] = 'shellsecret'

View File

@ -209,8 +209,11 @@ dast:
### Site profile validation
Site profile validation reduces the risk of running an active scan against the wrong website. A site
must be validated before an active scan can run against it. Each of the site validation methods are
equivalent in functionality, so use whichever is most suitable:
must be validated before an active scan can run against it. Site profile validation is not a security feature.
If necessary, you can run an active DAST scan without validation by using a
[pipeline scan](browser/configuration/enabling_the_analyzer.md).
Each of the site validation methods are equivalent in functionality, so use whichever is most suitable:
- **Text file validation**: Requires a text file be uploaded to the target site. The text file is
allocated a name and content that is unique to the project. The validation process checks the

View File

@ -39,6 +39,8 @@ To set up GitLab Duo with Amazon Q, you must:
- [Complete the prerequisites](#prerequisites)
- [Create an identity provider](#create-an-iam-identity-provider)
- [Create an IAM role](#create-an-iam-role)
- [Add the policy](#add-the-policy)
- [Allow administrators to use customer managed keys](#allow-administrators-to-use-customer-managed-keys)
- [Enter the ARN in GitLab and enable Amazon Q](#enter-the-arn-in-gitlab-and-enable-amazon-q)
- [Add the Amazon Q user to your project](#add-the-amazon-q-user-to-your-project)
@ -94,7 +96,7 @@ After you set up the IAM role, you cannot change the AWS account that's associat
1. Skip **Permissions policies** by selecting **Next**. You will create an inline policy later.
1. Ensure the trust policy is correct. It should look like this:
```plaintext
```json
{
"Version": "2012-10-17",
"Statement": [
@ -117,6 +119,8 @@ After you set up the IAM role, you cannot change the AWS account that's associat
1. Name the role, for example `QDeveloperAccess`, and select **Create role**.
### Add the policy
Now edit the role and add the policy:
1. Find the role that you just created and select it.
@ -133,21 +137,42 @@ Now edit the role and add the policy:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "GitLabDuoPermissions",
"Effect": "Allow",
"Action": [
"q:SendEvent",
"q:CreateOAuthAppConnection",
"q:CreateAuthGrant",
"q:UpdateAuthGrant",
"q:UpdateOAuthAppConnection"
],
"Resource": "*"
}
]
"Version": "2012-10-17",
"Statement": [
{
"Sid": "GitLabDuoUsagePermissions",
"Effect": "Allow",
"Action": [
"q:SendEvent",
"q:CreateAuthGrant",
"q:UpdateAuthGrant",
"q:GenerateCodeRecommendations",
"q:SendMessage",
"q:ListPlugins",
"q:VerifyOAuthAppConnection"
],
"Resource": "*"
},
{
"Sid": "GitLabDuoManagementPermissions",
"Effect": "Allow",
"Action": [
"q:CreateOAuthAppConnection",
"q:DeleteOAuthAppConnection"
],
"Resource": "*"
},
{
"Sid": "GitLabDuoPluginPermissions",
"Effect": "Allow",
"Action": [
"q:CreatePlugin",
"q:DeletePlugin",
"q:GetPlugin"
],
"Resource": "arn:aws:qdeveloper:*:*:plugin/GitLabDuoWithAmazonQ/*"
}
]
}
```
@ -160,6 +185,49 @@ Now edit the role and add the policy:
arn:aws:iam::123456789:role/QDeveloperAccess
```
#### Allow administrators to use customer managed keys
If you are an administrator, you can use AWS Key Management Service (AWS KMS)
customer managed keys (CMKs) to encrypt customer data.
Update the role policy to grant permission to use CMKs when you create your key policy on a configured role in the KMS console.
The `kms:ViaService` condition key limits the use of a KMS key to requests from specified AWS services.
Additionally, it's used to deny permission to use a KMS key when the request comes from particular services.
With this condition key, you can limit which requests can use the CMK to encrypt or decrypt content.
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "Sid0",
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::<awsAccountId>:role/<rolename>"
},
"Action": [
"kms:GenerateDataKeyWithoutPlaintext",
"kms:Decrypt",
"kms:ReEncryptFrom",
"kms:ReEncryptTo"
],
"Resource": "*",
"Condition": {
"StringEquals": {
"kms:ViaService": [
"q.<region>.amazonaws.com"
]
}
}
}
]
}
```
For more information, see
[`kms:ViaService` in the AWS KMS Developer Guide](https://docs.aws.amazon.com/kms/latest/developerguide/conditions-kms.html#conditions-kms-via-service).
### Enter the ARN in GitLab and enable Amazon Q
Now, enter the ARN into GitLab and determine which groups and projects can access the feature.

View File

@ -15,10 +15,11 @@ title: Protected packages
{{< history >}}
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/416395) in GitLab 16.5 [with a flag](../../../administration/feature_flags.md) named `packages_protected_packages`. Disabled by default. This feature is an [experiment](../../../policy/development_stages_support.md).
- The protection rule setting **Push protected up to access level** [renamed](https://gitlab.com/gitlab-org/gitlab/-/issues/416382) to **Minimum access level for push** in GitLab 17.1
- The protection rule setting **Push protected up to access level** [renamed](https://gitlab.com/gitlab-org/gitlab/-/issues/416382) to **Minimum access level for push** in GitLab 17.1.
- [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/472655) in GitLab 17.5.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/472655) in GitLab 17.6. Feature flag `packages_protected_packages` removed.
- Maven protected packages [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/323969) in GitLab 17.9 [with a flag](../../../administration/feature_flags.md) named `packages_protected_packages_maven`. Disabled by default. This feature is an [experiment](../../../policy/development_stages_support.md).
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/516215) in GitLab 17.10 [with a flag](../../../administration/feature_flags.md) named `packages_protected_packages_delete`. Disabled by default. This feature is an [experiment](../../../policy/development_stages_support.md).
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/497082) in GitLab 17.11. Feature flag `packages_protected_packages_maven` removed.
{{< /history >}}
@ -27,15 +28,16 @@ By default, any user with at least the Developer role can create,
edit, and delete packages. Add a package protection rule to restrict
which users can make changes to your packages.
GitLab supports only push protection for npm, pypi and maven packages, but [epic 5574](https://gitlab.com/groups/gitlab-org/-/epics/5574) proposes to add additional features and package formats.
GitLab supports package protection for npm, PyPI, and maven packages, but [epic 5574](https://gitlab.com/groups/gitlab-org/-/epics/5574) proposes to add additional features and package formats.
When a package is protected, the default behavior enforces these restrictions on the package:
| Action | Who can do it |
|:-----------------------------------------|:----------------------------------------------------------------------------------|
| Protect a package | At least the Maintainer role. |
| Push a new package | At least the role set in [**Minimum access level for push**](#protect-a-package). |
| Push a new package with a deploy token | Any valid deploy token, only if the pushed package is not matched by a protection rule. Protected packages cannot be pushed with a deploy token. |
| Action | Who can do it |
|:---------------------------------------|:----------------------------------------------------------------------------------|
| Protect a package | At least the Maintainer role. |
| Push a new package | At least the role set in [**Minimum access level for push**](#protect-a-package). |
| Push a new package with a deploy token | Any valid deploy token, only if the pushed package is not matched by a protection rule. Protected packages cannot be pushed with a deploy token. |
| Delete a package | At least the role set in [**Minimum access level for delete**](#protect-a-package). |
## Protect a package
@ -59,6 +61,7 @@ To protect a package:
- **Name pattern** is a package name pattern you want to protect. The pattern can include a wildcard (`*`).
- **Package type** is the type of package to protect.
- **Minimum access level for push** is the minimum role required to push a package matching the name pattern.
- **Minimum access level for delete** is the minimum role required to delete a package matching the name pattern.
1. Select **Protect**.
The package protection rule is created, and appears in the settings.

View File

@ -153,6 +153,9 @@ Prerequisites:
- You must have the Owner role for the project
- [A list of object IDs](#get-a-list-of-object-ids) to remove.
- Your project must not be:
- A fork of a public upstream project.
- A public upstream project with downstream forks.
To remove blobs from your repository:

View File

@ -8,7 +8,9 @@ module Gitlab
def add_metric(metric, time_frame: 'none', options: {})
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize
metric_class.new(time_frame: time_frame, options: options).instrumentation
Gitlab::UsageData.with_metadata do
metric_class.new(time_frame: time_frame, options: options).instrumentation
end
end
def count(relation, column = nil, batch: true, batch_size: nil, start: nil, finish: nil)

View File

@ -208,7 +208,7 @@ module Gitlab
end
def redis_usage_data(counter = nil, &block)
with_metadata do
Gitlab::UsageData.with_metadata do
if block
redis_usage_counter(&block)
elsif counter.present?
@ -264,6 +264,19 @@ module Gitlab
end
end
# Walks a (possibly nested) metrics payload hash and returns a flat list of
# metadata entries for every leaf value that responds to #duration:
#   { name: 'dotted.key.path', time_elapsed: <duration>, error: <error> }
# nil values are removed from each entry; non-hash payloads yield [].
def metrics_collection_metadata(payload, parents = [])
  return [] unless payload.is_a?(Hash)

  payload.flat_map do |key, value|
    path = parents + [key]

    # Recurse into nested hashes; anything else without a duration yields [].
    next metrics_collection_metadata(value, path) unless value.respond_to?(:duration)

    { name: path.join('.'), time_elapsed: value.duration, error: value.error }.compact
  end
end
private
def prometheus_client(verify:)

View File

@ -15161,6 +15161,9 @@ msgstr ""
msgid "ComplianceFrameworksReport|Compliance framework ID"
msgstr ""
msgid "ComplianceFrameworksReport|Control ID copied to clipboard."
msgstr ""
msgid "ComplianceFrameworksReport|Controls"
msgstr ""
@ -15389,6 +15392,12 @@ msgstr ""
msgid "ComplianceFrameworks|No frameworks found. Create a framework in top-level group %{linkStart}namespace%{linkEnd} to assign it to a project."
msgstr ""
msgid "ComplianceFrameworks|No projects found"
msgstr ""
msgid "ComplianceFrameworks|No projects found that match filters"
msgstr ""
msgid "ComplianceFrameworks|Note: This report was truncated to adhere to filesize limits of %{size}."
msgstr ""

View File

@ -193,7 +193,6 @@ spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.j
spec/frontend/vue_shared/components/metric_images/metric_image_details_modal_spec.js
spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
spec/frontend/vue_shared/components/registry/code_instruction_spec.js
spec/frontend/vue_shared/components/smart_virtual_list_spec.js
spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js
spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js
spec/frontend/vue_shared/directives/tooltip_on_truncate_spec.js

View File

@ -484,7 +484,7 @@ RSpec.describe 'Database schema',
"Sbom::Occurrence" => %w[ancestors],
"Security::ApprovalPolicyRule" => %w[content],
"Security::Policy" => %w[metadata],
"ServicePing::NonSqlServicePing" => %w[payload], # Usage data payload changes often, we cannot use one schema
"ServicePing::NonSqlServicePing" => %w[payload metadata], # Usage data payloads change often, we cannot use one schema
"ServicePing::QueriesServicePing" => %w[payload], # Usage data payload changes often, we cannot use one schema
"Security::ScanExecutionPolicyRule" => %w[content],
"Security::VulnerabilityManagementPolicyRule" => %w[content],

View File

@ -4,6 +4,7 @@ FactoryBot.define do
# Minimal valid NonSqlServicePing record. `metadata` mirrors the entry shape
# produced by Gitlab::Utils::UsageData.metrics_collection_metadata
# (name / time_elapsed / error).
factory :non_sql_service_ping, class: 'ServicePing::NonSqlServicePing' do
  recorded_at { Time.current }
  payload { { test: 'test' } }
  metadata { { name: 'test', time_elapsed: 100, error: nil } }
  association :organization, factory: :organization
end
end

View File

@ -10,6 +10,8 @@ import { initHiddenFilesWarning } from '~/rapid_diffs/app/init_hidden_files_warn
import { initFileBrowser } from '~/rapid_diffs/app/init_file_browser';
import { StreamingError } from '~/rapid_diffs/streaming_error';
import { useDiffsView } from '~/rapid_diffs/stores/diffs_view';
import { fixWebComponentsStreamingOnSafari } from '~/rapid_diffs/app/safari_fix';
import { DIFF_FILE_MOUNTED } from '~/rapid_diffs/dom_events';
jest.mock('~/lib/graphql');
jest.mock('~/awards_handler');
@ -17,6 +19,7 @@ jest.mock('~/mr_notes/stores');
jest.mock('~/rapid_diffs/app/view_settings');
jest.mock('~/rapid_diffs/app/init_hidden_files_warning');
jest.mock('~/rapid_diffs/app/init_file_browser');
jest.mock('~/rapid_diffs/app/safari_fix');
describe('Rapid Diffs App', () => {
let app;
@ -27,6 +30,7 @@ describe('Rapid Diffs App', () => {
beforeEach(() => {
createTestingPinia();
useDiffsView(pinia).loadMetadata.mockResolvedValue();
initFileBrowser.mockResolvedValue();
setHTMLFixture(
`
@ -37,6 +41,13 @@ describe('Rapid Diffs App', () => {
);
});
beforeAll(() => {
Object.defineProperty(window, 'customElements', {
value: { define: jest.fn() },
writable: true,
});
});
it('initializes the app', async () => {
let res;
const mock = useDiffsView().loadMetadata.mockImplementationOnce(
@ -50,23 +61,33 @@ describe('Rapid Diffs App', () => {
expect(useDiffsView().metadataEndpoint).toBe('/metadata');
expect(mock).toHaveBeenCalled();
expect(initViewSettings).toHaveBeenCalledWith({ pinia, streamUrl: '/reload' });
expect(window.customElements.get('diff-file')).toBe(DiffFile);
expect(window.customElements.get('diff-file-mounted')).toBe(DiffFileMounted);
expect(window.customElements.get('streaming-error')).toBe(StreamingError);
expect(window.customElements.define).toHaveBeenCalledWith('diff-file', DiffFile);
expect(window.customElements.define).toHaveBeenCalledWith('diff-file-mounted', DiffFileMounted);
expect(window.customElements.define).toHaveBeenCalledWith('streaming-error', StreamingError);
await res();
expect(initHiddenFilesWarning).toHaveBeenCalled();
expect(fixWebComponentsStreamingOnSafari).toHaveBeenCalled();
expect(initFileBrowser).toHaveBeenCalledWith('/diff-files-metadata');
});
it('streams remaining diffs', () => {
createApp();
app.init();
app.streamRemainingDiffs();
expect(useDiffsList().streamRemainingDiffs).toHaveBeenCalledWith('/stream');
});
it('reloads diff files', () => {
createApp();
app.init();
app.reloadDiffs();
expect(useDiffsList().reloadDiffs).toHaveBeenCalledWith('/reload');
});
it('reacts to files loading', () => {
createApp();
app.init();
document.dispatchEvent(new CustomEvent(DIFF_FILE_MOUNTED));
expect(useDiffsList(pinia).addLoadedFile).toHaveBeenCalled();
});
});

View File

@ -6,7 +6,6 @@ import { renderHtmlStreams } from '~/streaming/render_html_streams';
import waitForPromises from 'helpers/wait_for_promises';
import { toPolyfillReadable } from '~/streaming/polyfills';
import { DiffFile } from '~/rapid_diffs/diff_file';
import { DIFF_FILE_MOUNTED } from '~/rapid_diffs/dom_events';
import { performanceMarkAndMeasure } from '~/performance/utils';
jest.mock('~/streaming/polyfills');
@ -55,33 +54,6 @@ describe('Diffs list store', () => {
});
};
const itAddsLoadingFilesWhileStreaming = (action) => {
it('adds loading files while streaming', async () => {
let resolveRequest;
let resolveStreamRender;
global.fetch.mockImplementation(() => {
return new Promise((resolve) => {
resolveRequest = resolve;
});
});
renderHtmlStreams.mockImplementation(() => {
return new Promise((resolve) => {
resolveStreamRender = resolve;
});
});
action();
resolveRequest({ body: {} });
await waitForPromises();
const element = document.createElement('div');
element.id = 'foo';
document.body.appendChild(element);
element.dispatchEvent(new CustomEvent(DIFF_FILE_MOUNTED, { bubbles: true }));
resolveStreamRender();
await waitForPromises();
expect(store.loadedFiles).toStrictEqual({ foo: true });
});
};
beforeEach(() => {
const pinia = createTestingPinia({ stubActions: false });
setActivePinia(pinia);
@ -137,7 +109,6 @@ describe('Diffs list store', () => {
itCancelsRunningRequest(() => store.streamRemainingDiffs('/stream'));
itSetsStatuses(() => store.streamRemainingDiffs('/stream'));
itAddsLoadingFilesWhileStreaming(() => store.streamRemainingDiffs('/stream'));
});
describe('#reloadDiffs', () => {
@ -154,7 +125,6 @@ describe('Diffs list store', () => {
itCancelsRunningRequest(() => store.reloadDiffs('/stream'));
itSetsStatuses(() => store.reloadDiffs('/stream'));
itAddsLoadingFilesWhileStreaming(() => store.reloadDiffs('/stream'));
it('sets loading state', () => {
store.reloadDiffs('/stream');

View File

@ -1,8 +1,31 @@
import { mount } from '@vue/test-utils';
import VirtualList from 'vue-virtual-scroll-list';
import SmartVirtualScrollList from '~/vue_shared/components/smart_virtual_list.vue';
describe('Toggle Button', () => {
let vm;
// Mock the VirtualList component for Vue 3 compatibility.
// The stub reproduces only the DOM structure these specs assert on: a root
// element of tag `rtag` (marked with the `js-virtual-list` class) wrapping a
// `wtag` element carrying `wclass`, which renders the default slot content.
jest.mock('vue-virtual-scroll-list', () => {
  return {
    __esModule: true,
    default: {
      name: 'VirtualList',
      // Vue 2 render-function signature (`createElement`, aka `h`).
      render(createElement) {
        return createElement(this.rtag, { class: 'js-virtual-list' }, [
          createElement(this.wtag, { class: this.wclass }, this.$slots.default),
        ]);
      },
      // Subset of the real component's props used by these specs.
      props: {
        size: Number,
        remain: Number,
        rtag: String,
        wtag: String,
        wclass: String,
      },
    },
  };
});
describe('Smart Virtual List', () => {
let wrapper;
const createComponent = ({ length, remain }) => {
const smartListProperties = {
@ -15,45 +38,50 @@ describe('Toggle Button', () => {
remain,
};
const items = Array(length).fill(1);
// Use Vue 2 compatible approach for defining data
const Component = {
components: {
SmartVirtualScrollList,
},
smartListProperties,
items: Array(length).fill(1),
data() {
return {
smartListProperties,
items,
};
},
template: `
<smart-virtual-scroll-list v-bind="$options.smartListProperties">
<li v-for="(val, key) in $options.items" :key="key">{{ key + 1 }}</li>
<smart-virtual-scroll-list v-bind="smartListProperties">
<li v-for="(val, key) in items" :key="key">{{ key + 1 }}</li>
</smart-virtual-scroll-list>`,
};
return mount(Component).vm;
return mount(Component);
};
afterEach(() => {
vm.$destroy();
});
const findVirtualScrollList = () => wrapper.findComponent(SmartVirtualScrollList);
const findVirtualListItem = () => wrapper.findComponent(VirtualList);
describe('if the list is shorter than the maximum shown elements', () => {
const listLength = 10;
beforeEach(() => {
vm = createComponent({ length: listLength, remain: 20 });
wrapper = createComponent({ length: listLength, remain: 20 });
});
it('renders without the vue-virtual-scroll-list component', () => {
expect(vm.$el.classList).not.toContain('js-virtual-list');
expect(vm.$el.classList).toContain('js-plain-element');
expect(findVirtualListItem().exists()).toBe(false);
});
it('renders list with provided tags and classes for the wrapper elements', () => {
expect(vm.$el.tagName).toEqual('SECTION');
expect(vm.$el.firstChild.tagName).toEqual('UL');
expect(vm.$el.firstChild.classList).toContain('test-class');
expect(wrapper.element.tagName).toEqual('SECTION');
expect(wrapper.element.firstChild.tagName).toEqual('UL');
expect(wrapper.element.firstChild.classList.contains('test-class')).toBe(true);
});
it('renders all children list elements', () => {
expect(vm.$el.querySelectorAll('li').length).toEqual(listLength);
expect(wrapper.findAll('li').length).toEqual(listLength);
});
});
@ -61,22 +89,23 @@ describe('Toggle Button', () => {
const maxItemsShown = 20;
beforeEach(() => {
vm = createComponent({ length: 1000, remain: maxItemsShown });
wrapper = createComponent({ length: 1000, remain: maxItemsShown });
});
it('uses the vue-virtual-scroll-list component', () => {
expect(vm.$el.classList).toContain('js-virtual-list');
expect(vm.$el.classList).not.toContain('js-plain-element');
expect(findVirtualListItem().exists()).toBe(true);
});
it('renders list with provided tags and classes for the wrapper elements', () => {
expect(vm.$el.tagName).toEqual('SECTION');
expect(vm.$el.firstChild.tagName).toEqual('UL');
expect(vm.$el.firstChild.classList).toContain('test-class');
expect(findVirtualScrollList().props('rtag')).toEqual('section');
expect(findVirtualScrollList().props('wtag')).toEqual('ul');
expect(findVirtualScrollList().props('wclass')).toEqual('test-class');
});
it('renders at max twice the maximum shown elements', () => {
expect(vm.$el.querySelectorAll('li').length).toBeLessThanOrEqual(2 * maxItemsShown);
it('renders at least some list elements', () => {
// In our mocked version we can't reliably test exact counts
// since the virtualization logic is mocked
expect(wrapper.findAll('li').length).toBeGreaterThan(0);
});
});
});

View File

@ -477,6 +477,15 @@ describe('preserveDetailsState', () => {
`);
});
});
describe('createBranch', () => {
it('returns a "create branch" path when given fullPath', () => {
expect(createBranchMRApiPathHelper.createBranch('myGroup/myProject')).toBe(
'/myGroup/myProject/-/branches',
);
});
});
describe('createMR', () => {
const fullPath = 'gitlab-org/gitlab';
const workItemIID = '12';
@ -518,4 +527,17 @@ describe('createMR', () => {
'/foobar/gitlab-org/gitlab/-/merge_requests/new?merge_request%5Bissue_iid%5D=12&merge_request%5Bsource_branch%5D=12-fix',
);
});
it('returns url with encoded branch names', () => {
const path = createBranchMRApiPathHelper.createMR({
fullPath,
workItemIid: workItemIID,
sourceBranch: 'source-branch#1',
targetBranch: 'target-branch#1',
});
expect(path).toBe(
'/gitlab-org/gitlab/-/merge_requests/new?merge_request%5Bissue_iid%5D=12&merge_request%5Bsource_branch%5D=source-branch%231&merge_request%5Btarget_branch%5D=target-branch%231',
);
});
});

View File

@ -11,6 +11,10 @@ RSpec.describe Gitlab::UsageDataNonSqlMetrics do
it 'computes the metric value for given metric' do
expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid)
end
it 'records metadata' do
expect(described_class.add_metric(metric).duration).to be_present
end
end
describe '.count' do

View File

@ -471,9 +471,9 @@ RSpec.describe Gitlab::Utils::UsageData do
describe '#redis_usage_data' do
it 'records duration' do
expect(described_class).to receive(:with_metadata)
result = described_class.redis_usage_data
described_class.redis_usage_data
expect(result.duration).to be_present
end
context 'with block given' do

View File

@ -111,6 +111,15 @@ RSpec.describe Clusters::Migration::CreateService, feature_category: :deployment
expect(token.name).to eq(agent_name)
expect(token.created_by_user).to eq(user)
end
it 'schedules a worker to install the agent into the cluster' do
allow(Clusters::Migration::InstallAgentWorker).to receive(:perform_async).and_call_original
expect(response).to be_success
migration = Clusters::AgentMigration.last
expect(Clusters::Migration::InstallAgentWorker).to have_received(:perform_async).with(migration.id).once
end
end
context 'with a project cluster' do

View File

@ -0,0 +1,162 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for the service that installs a GitLab agent into an existing
# certificate-based cluster as part of the agent-migration flow. All
# Kubernetes interaction goes through a stubbed kubeclient, so no real
# cluster is contacted.
RSpec.describe Clusters::Migration::InstallAgentService, feature_category: :deployment_management do
  # Reloaded between examples so status transitions performed in one
  # example do not leak into the next.
  let_it_be_with_reload(:migration) { create(:cluster_agent_migration) }
  let_it_be(:agent_token) { create(:cluster_agent_token, agent: migration.agent) }
  let_it_be(:agent) { migration.agent }

  # Fully stubbed Kubernetes client; expectations below pin the exact
  # resources the service is expected to create.
  let(:kubeclient) { instance_double(Gitlab::Kubernetes::KubeClient) }
  let(:cluster_status) { :connected }

  describe '#execute' do
    let(:namespace) { "gitlab-agent-#{agent.name}" }
    let(:kas_version) { Gitlab::Kas.install_version_info }
    let(:kas_address) { Gitlab::Kas.external_url }

    # Pinned image used by the one-shot install pod.
    let(:helm_install_image) do
      'registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image:helm-3.17.2-kube-1.32.3-alpine-3.21.3'
    end

    # Expected helm invocation run inside the install pod. Note the
    # escaped `\=` in the --set flags: the command is executed via
    # `sh -c $(INSTALL_COMMAND)` (see install_pod_resource below).
    let(:install_command) do
      <<~CMD
        helm repo add gitlab https://charts.gitlab.io
        helm repo update
        helm upgrade --install #{agent.name} gitlab/gitlab-agent \
        --namespace #{namespace} \
        --create-namespace \
        --set image.tag\\=v#{kas_version} \
        --set config.token\\=#{agent_token.token} \
        --set config.kasAddress\\=#{kas_address}
      CMD
    end

    # Service account the install pod runs under.
    let(:service_account_resource) do
      Kubeclient::Resource.new(metadata: { name: 'install-gitlab-agent', namespace: 'default' })
    end

    # Grants the install service account cluster-admin so helm can
    # create the agent's namespace and resources.
    let(:cluster_role_binding_resource) do
      Kubeclient::Resource.new(
        metadata: { name: 'install-gitlab-agent' },
        roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' },
        subjects: [{ kind: 'ServiceAccount', name: 'install-gitlab-agent', namespace: 'default' }]
      )
    end

    # One-shot pod (restartPolicy: Never) that runs the helm install
    # command defined above.
    let(:install_pod_resource) do
      Kubeclient::Resource.new(
        metadata: {
          name: 'install-gitlab-agent',
          namespace: 'default'
        },
        spec: {
          containers: [{
            name: 'helm',
            image: helm_install_image,
            env: [{ name: 'INSTALL_COMMAND', value: install_command.strip }],
            command: %w[/bin/sh],
            args: %w[-c $(INSTALL_COMMAND)]
          }],
          serviceAccountName: 'install-gitlab-agent',
          restartPolicy: 'Never'
        }
      )
    end

    subject(:service) { described_class.new(migration) }

    before do
      allow(migration.cluster).to receive_messages(kubeclient: kubeclient, connection_status: cluster_status)
    end

    it 'installs the agent and associated resources into the cluster' do
      expect(kubeclient).to receive(:create_or_update_service_account)
        .with(service_account_resource)

      expect(kubeclient).to receive(:create_or_update_cluster_role_binding)
        .with(cluster_role_binding_resource)

      expect(kubeclient).to receive(:create_pod)
        .with(install_pod_resource)

      expect { service.execute }.to change { migration.agent_install_status }.from('pending').to('success')
    end

    context 'when running on GitLab.com' do
      # Same command as above but without `--set image.tag`: on .com the
      # chart's default agent version is used.
      let(:install_command) do
        <<~CMD
          helm repo add gitlab https://charts.gitlab.io
          helm repo update
          helm upgrade --install #{agent.name} gitlab/gitlab-agent \
          --namespace #{namespace} \
          --create-namespace \
          --set config.token\\=#{agent_token.token} \
          --set config.kasAddress\\=#{kas_address}
        CMD
      end

      before do
        allow(Gitlab).to receive(:com?).and_return(true)
      end

      it 'does not specify an agent version' do
        expect(kubeclient).to receive(:create_or_update_service_account)
          .with(service_account_resource)

        expect(kubeclient).to receive(:create_or_update_cluster_role_binding)
          .with(cluster_role_binding_resource)

        expect(kubeclient).to receive(:create_pod)
          .with(install_pod_resource)

        expect { service.execute }.to change { migration.agent_install_status }.from('pending').to('success')
      end
    end

    context 'when an error is raised while creating resources' do
      before do
        allow(kubeclient).to receive(:create_or_update_service_account)
          .and_raise(Kubeclient::HttpError.new(409, 'Conflict', nil))
      end

      # Errors are recorded on the migration (status + exception class
      # name) rather than propagated to the caller.
      it 'sets the migration status to error' do
        service.execute

        expect(migration.agent_install_status).to eq('error')
        expect(migration.agent_install_message).to eq('Kubeclient::HttpError')
      end
    end

    context 'when the cluster is not connected' do
      let(:cluster_status) { :unreachable }

      it 'does not provision any resources' do
        expect(kubeclient).not_to receive(:create_namespace)

        expect { service.execute }.not_to change { migration.agent_install_status }
      end
    end

    # The service is a no-op unless the migration is still pending:
    context 'when the migration is already in progress' do
      before do
        migration.update!(agent_install_status: :in_progress)
      end

      it 'does not provision any resources' do
        expect(kubeclient).not_to receive(:create_namespace)

        expect { service.execute }.not_to change { migration.agent_install_status }
      end
    end

    context 'when the migration has already completed' do
      before do
        migration.update!(agent_install_status: :success)
      end

      it 'does not provision any resources' do
        expect(kubeclient).not_to receive(:create_namespace)

        expect { service.execute }.not_to change { migration.agent_install_status }
      end
    end
  end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for the background worker that triggers agent installation for a
# cluster-agent migration. The worker simply looks up the migration and
# delegates to Clusters::Migration::InstallAgentService.
RSpec.describe Clusters::Migration::InstallAgentWorker, feature_category: :deployment_management do
  let(:migration) { create(:cluster_agent_migration) }

  it_behaves_like 'an idempotent worker' do
    let(:job_args) { migration.id }
  end

  describe '#perform' do
    let(:migration_id) { migration.id }

    subject(:perform) { described_class.new.perform(migration_id) }

    it 'calls the agent installation service' do
      expect_next_instance_of(Clusters::Migration::InstallAgentService, migration) do |service|
        expect(service).to receive(:execute).once
      end

      perform
    end

    # Deleted records must not make the job retry forever.
    context 'when the migration record no longer exists' do
      let(:migration_id) { non_existing_record_id }

      it 'completes without raising an error' do
        expect { perform }.not_to raise_error
      end
    end
  end
end

View File

@ -76,6 +76,7 @@ RSpec.describe GitlabServicePingWorker, :clean_gitlab_redis_shared_state, featur
record = create(
:non_sql_service_ping,
payload: { some_metric: 123 },
metadata: { name: 'some_metric', time_elapsed: 10, error: 'some error' },
recorded_at: non_sql_payload[:recorded_at]
)