Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2025-07-09 06:11:33 +00:00
parent 764d43d2fa
commit 281acb56f0
25 changed files with 482 additions and 72 deletions

View File

@@ -1,7 +1,7 @@
[
{"name":"CFPropertyList","version":"3.0.7","platform":"ruby","checksum":"c45721614aca8d5eb6fa216f2ec28ec38de1a94505e9766a20e98745492c3c4c"},
{"name":"RedCloth","version":"4.3.4","platform":"ruby","checksum":"5231b2fdd91a933915cba330e5fd1a74025e77b56f57b7404c7191ebf2812297"},
{"name":"acme-client","version":"2.0.21","platform":"ruby","checksum":"e0a044f993cd26f0ba7f8b13a3b2b007ef864cfaa333075a2d8865b087297641"},
{"name":"acme-client","version":"2.0.22","platform":"ruby","checksum":"817534b743e2c93b3e498dad6b0f1a96a8e6df273bb04e37525d586a519176f7"},
{"name":"actioncable","version":"7.1.5.1","platform":"ruby","checksum":"764637b5b2d97b94e412d562c177bfd16b0fd769d55c98846362f5263e8aaa0d"},
{"name":"actionmailbox","version":"7.1.5.1","platform":"ruby","checksum":"c3c20589fe43e6fa88bba2d76a6f9805ffdd02531f4a9a4af8197d59f5a5360a"},
{"name":"actionmailer","version":"7.1.5.1","platform":"ruby","checksum":"b213d6d880b23b093ccfef3b4f87a3d27e4666442f71b5b634b2d19e19a49759"},

View File

@@ -218,8 +218,8 @@ GEM
nkf
rexml
RedCloth (4.3.4)
acme-client (2.0.21)
base64 (~> 0.2.0)
acme-client (2.0.22)
base64 (~> 0.2)
faraday (>= 1.0, < 3.0.0)
faraday-retry (>= 1.0, < 3.0.0)
actioncable (7.1.5.1)

View File

@@ -1,7 +1,7 @@
[
{"name":"CFPropertyList","version":"3.0.7","platform":"ruby","checksum":"c45721614aca8d5eb6fa216f2ec28ec38de1a94505e9766a20e98745492c3c4c"},
{"name":"RedCloth","version":"4.3.4","platform":"ruby","checksum":"5231b2fdd91a933915cba330e5fd1a74025e77b56f57b7404c7191ebf2812297"},
{"name":"acme-client","version":"2.0.21","platform":"ruby","checksum":"e0a044f993cd26f0ba7f8b13a3b2b007ef864cfaa333075a2d8865b087297641"},
{"name":"acme-client","version":"2.0.22","platform":"ruby","checksum":"817534b743e2c93b3e498dad6b0f1a96a8e6df273bb04e37525d586a519176f7"},
{"name":"actioncable","version":"7.2.2.1","platform":"ruby","checksum":"5b3b885075a80767d63cbf2b586cbf82466a241675b7985233f957abb01bffb4"},
{"name":"actionmailbox","version":"7.2.2.1","platform":"ruby","checksum":"896a47c2520f4507c75dde67c6ea1f5eec3a041fe7bfbf3568c4e0149a080e25"},
{"name":"actionmailer","version":"7.2.2.1","platform":"ruby","checksum":"b02ae523c32c8ad762d4db941e76f3c108c106030132247ee7a7b8c86bc7b21f"},

View File

@@ -218,8 +218,8 @@ GEM
nkf
rexml
RedCloth (4.3.4)
acme-client (2.0.21)
base64 (~> 0.2.0)
acme-client (2.0.22)
base64 (~> 0.2)
faraday (>= 1.0, < 3.0.0)
faraday-retry (>= 1.0, < 3.0.0)
actioncable (7.2.2.1)

View File

@@ -0,0 +1,14 @@
---
table_name: p_duo_workflows_checkpoints
classes:
- Ai::DuoWorkflows::Checkpoint
feature_categories:
- duo_workflow
description: Duo workflows langraph checkpoints
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/193398
milestone: '18.2'
gitlab_schema: gitlab_main_cell
sharding_key:
namespace_id: namespaces
project_id: projects
table_size: small

View File

@@ -0,0 +1,44 @@
# frozen_string_literal: true
class CreatePDuoWorkflowsCheckpoints < Gitlab::Database::Migration[2.3]
include Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
milestone '18.2'
disable_ddl_transaction!
OPTIONS = {
primary_key: [:id, :created_at],
options: 'PARTITION BY RANGE (created_at)',
if_not_exists: true
}
def up
create_table :p_duo_workflows_checkpoints, **OPTIONS do |t| # rubocop:disable Migration/EnsureFactoryForTable -- see ee/spec/factories/ai/duo_workflows/workflow_checkpoints.rb
t.bigserial :id, null: false
t.bigint :workflow_id, null: false
t.bigint :project_id, index: true
t.timestamps_with_timezone null: false
t.bigint :namespace_id, index: true
t.text :thread_ts, null: false, limit: 255
t.text :parent_ts, null: true, limit: 255
t.jsonb :checkpoint, null: false
t.jsonb :metadata, null: false
t.index [:workflow_id, :thread_ts],
name: 'index_p_duo_workflows_checkpoints_thread'
end
add_multi_column_not_null_constraint(:p_duo_workflows_checkpoints, :project_id, :namespace_id)
add_concurrent_partitioned_foreign_key :p_duo_workflows_checkpoints, :namespaces, validate: true,
column: :namespace_id, on_delete: :cascade
add_concurrent_partitioned_foreign_key :p_duo_workflows_checkpoints, :projects, validate: true,
column: :project_id, on_delete: :cascade
add_concurrent_partitioned_foreign_key :p_duo_workflows_checkpoints, :duo_workflows_workflows, validate: true,
column: :workflow_id, on_delete: :cascade
end
def down
drop_table :p_duo_workflows_checkpoints
end
end

View File

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class EnsureIdUniquenessForPDuoWorkflowsCheckpoints < Gitlab::Database::Migration[2.3]
include Gitlab::Database::PartitioningMigrationHelpers::UniquenessHelpers
milestone '18.2'
TABLE_NAME = :p_duo_workflows_checkpoints
SEQ_NAME = :p_duo_workflows_checkpoints_id_seq
def up
ensure_unique_id(TABLE_NAME, seq: SEQ_NAME)
end
def down
revert_ensure_unique_id(TABLE_NAME, seq: SEQ_NAME)
end
end

View File

@@ -0,0 +1 @@
f6e6fa14f7b76688c5990ffa4362b20454a949fe60dec79411e5e20512163bb5

View File

@@ -0,0 +1 @@
75c9f47e9ba7336fe9062340d0c0bd81c5adfd93cad38f17b22a37286821d095

View File

@@ -153,6 +153,19 @@ RETURN NEW;
END
$$;
CREATE FUNCTION assign_p_duo_workflows_checkpoints_id_value() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."id" IS NOT NULL THEN
RAISE WARNING 'Manually assigning ids is not allowed, the value will be ignored';
END IF;
NEW."id" := nextval('p_duo_workflows_checkpoints_id_seq'::regclass);
RETURN NEW;
END
$$;
CREATE FUNCTION assign_p_knowledge_graph_tasks_id_value() RETURNS trigger
LANGUAGE plpgsql
AS $$
@@ -19308,6 +19321,32 @@ CREATE SEQUENCE p_ci_workloads_id_seq
ALTER SEQUENCE p_ci_workloads_id_seq OWNED BY p_ci_workloads.id;
CREATE TABLE p_duo_workflows_checkpoints (
id bigint NOT NULL,
workflow_id bigint NOT NULL,
project_id bigint,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
namespace_id bigint,
thread_ts text NOT NULL,
parent_ts text,
checkpoint jsonb NOT NULL,
metadata jsonb NOT NULL,
CONSTRAINT check_70d1d05b50 CHECK ((num_nonnulls(namespace_id, project_id) = 1)),
CONSTRAINT check_b55c120f3f CHECK ((char_length(thread_ts) <= 255)),
CONSTRAINT check_e63817afa6 CHECK ((char_length(parent_ts) <= 255))
)
PARTITION BY RANGE (created_at);
CREATE SEQUENCE p_duo_workflows_checkpoints_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE p_duo_workflows_checkpoints_id_seq OWNED BY p_duo_workflows_checkpoints.id;
CREATE SEQUENCE p_knowledge_graph_enabled_namespaces_id_seq
START WITH 1
INCREMENT BY 1
@@ -31033,6 +31072,9 @@ ALTER TABLE ONLY p_ci_stages
ALTER TABLE ONLY p_ci_workloads
ADD CONSTRAINT p_ci_workloads_pkey PRIMARY KEY (id, partition_id);
ALTER TABLE ONLY p_duo_workflows_checkpoints
ADD CONSTRAINT p_duo_workflows_checkpoints_pkey PRIMARY KEY (id, created_at);
ALTER TABLE ONLY p_knowledge_graph_enabled_namespaces
ADD CONSTRAINT p_knowledge_graph_enabled_namespaces_pkey PRIMARY KEY (id, namespace_id);
@@ -37015,6 +37057,12 @@ CREATE INDEX index_p_ci_runner_machine_builds_on_runner_machine_id ON ONLY p_ci_
CREATE INDEX index_p_ci_workloads_on_project_id ON ONLY p_ci_workloads USING btree (project_id);
CREATE INDEX index_p_duo_workflows_checkpoints_on_namespace_id ON ONLY p_duo_workflows_checkpoints USING btree (namespace_id);
CREATE INDEX index_p_duo_workflows_checkpoints_on_project_id ON ONLY p_duo_workflows_checkpoints USING btree (project_id);
CREATE INDEX index_p_duo_workflows_checkpoints_thread ON ONLY p_duo_workflows_checkpoints USING btree (workflow_id, thread_ts);
CREATE UNIQUE INDEX index_p_knowledge_graph_enabled_namespaces_on_namespace_id ON ONLY p_knowledge_graph_enabled_namespaces USING btree (namespace_id);
CREATE INDEX index_p_knowledge_graph_enabled_namespaces_on_state ON ONLY p_knowledge_graph_enabled_namespaces USING btree (state);
@@ -41871,6 +41919,8 @@ CREATE TRIGGER assign_p_ci_pipelines_id_trigger BEFORE INSERT ON p_ci_pipelines
CREATE TRIGGER assign_p_ci_stages_id_trigger BEFORE INSERT ON p_ci_stages FOR EACH ROW EXECUTE FUNCTION assign_p_ci_stages_id_value();
CREATE TRIGGER assign_p_duo_workflows_checkpoints_id_trigger BEFORE INSERT ON p_duo_workflows_checkpoints FOR EACH ROW EXECUTE FUNCTION assign_p_duo_workflows_checkpoints_id_value();
CREATE TRIGGER assign_p_knowledge_graph_tasks_id_trigger BEFORE INSERT ON p_knowledge_graph_tasks FOR EACH ROW EXECUTE FUNCTION assign_p_knowledge_graph_tasks_id_value();
CREATE TRIGGER assign_zoekt_tasks_id_trigger BEFORE INSERT ON zoekt_tasks FOR EACH ROW EXECUTE FUNCTION assign_zoekt_tasks_id_value();
@@ -44827,6 +44877,9 @@ ALTER TABLE p_ci_build_sources
ALTER TABLE ONLY automation_rules
ADD CONSTRAINT fk_rails_025b519b8d FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE p_duo_workflows_checkpoints
ADD CONSTRAINT fk_rails_0320b7accd FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY incident_management_oncall_participants
ADD CONSTRAINT fk_rails_032b12996a FOREIGN KEY (oncall_rotation_id) REFERENCES incident_management_oncall_rotations(id) ON DELETE CASCADE;
@@ -44842,6 +44895,9 @@ ALTER TABLE ONLY ip_restrictions
ALTER TABLE ONLY terraform_state_versions
ADD CONSTRAINT fk_rails_04f176e239 FOREIGN KEY (terraform_state_id) REFERENCES terraform_states(id) ON DELETE CASCADE;
ALTER TABLE p_duo_workflows_checkpoints
ADD CONSTRAINT fk_rails_0679151c27 FOREIGN KEY (workflow_id) REFERENCES duo_workflows_workflows(id) ON DELETE CASCADE;
ALTER TABLE ONLY issue_assignment_events
ADD CONSTRAINT fk_rails_07683f8e80 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL;
@@ -46591,6 +46647,9 @@ ALTER TABLE ONLY work_item_select_field_values
ALTER TABLE ONLY clusters_integration_prometheus
ADD CONSTRAINT fk_rails_e44472034c FOREIGN KEY (cluster_id) REFERENCES clusters(id) ON DELETE CASCADE;
ALTER TABLE p_duo_workflows_checkpoints
ADD CONSTRAINT fk_rails_e449184b59 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_occurrence_identifiers
ADD CONSTRAINT fk_rails_e4ef6d027c FOREIGN KEY (occurrence_id) REFERENCES vulnerability_occurrences(id) ON DELETE CASCADE;

View File

@@ -119,6 +119,45 @@ except those captured by `runit`.
| [Sentinel logs](#sentinel-logs) | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes |
| [Workhorse logs](#workhorse-logs) | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes |
## Accessing logs on Helm chart installations
On Helm chart installations, GitLab components send logs to `stdout`, which can be accessed by using `kubectl logs`.
Logs are also available in the pod at `/var/log/gitlab` for the lifetime of the pod.
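For example (the pod name is a placeholder; substitute one from `kubectl get pods`):
```shell
# Stream a component's logs from stdout
kubectl logs -f <webservice-pod-name>
# List the log files kept inside the pod for its lifetime
kubectl exec <webservice-pod-name> -- ls /var/log/gitlab
```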
### Pods with structured logs (subcomponent filtering)
Some pods include a `subcomponent` field that identifies the specific log type:
```shell
# Webservice pod logs (Rails application)
kubectl logs -l app=webservice -c webservice | jq 'select(."subcomponent"=="<subcomponent-key>")'
# Sidekiq pod logs (background jobs)
kubectl logs -l app=sidekiq | jq 'select(."subcomponent"=="<subcomponent-key>")'
```
The following log sections indicate the appropriate pod and subcomponent key where applicable.
### Other pods
For other GitLab components that don't use structured logs with subcomponents, you can access logs directly.
To find available pod selectors:
```shell
# List all unique app labels in use
kubectl get pods -o jsonpath='{range .items[*]}{.metadata.labels.app}{"\n"}{end}' | grep -v '^$' | sort | uniq
# For pods with app labels
kubectl logs -l app=<pod-selector>
# For specific pods (when app labels aren't available)
kubectl get pods
kubectl logs <pod-name>
```
For more Kubernetes troubleshooting commands, see the [Kubernetes cheat sheet](https://docs.gitlab.com/charts/troubleshooting/kubernetes_cheat_sheet/).
## `production_json.log`
This file is located at:
@@ -126,6 +165,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/production_json.log` on Linux package installations.
- `/home/git/gitlab/log/production_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Webservice pods under the `subcomponent="production_json"` key.
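For example, following the filtering pattern above (the `app=webservice` selector is the chart default and may differ in your deployment):
```shell
kubectl logs -l app=webservice -c webservice | jq 'select(."subcomponent"=="production_json")'
```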
It contains a structured log for Rails controller requests received from
GitLab, thanks to [Lograge](https://github.com/roidrage/lograge/).
Requests from the API are logged to a separate file in `api_json.log`.
@@ -322,6 +363,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/api_json.log` on Linux package installations.
- `/home/git/gitlab/log/api_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Webservice pods under the `subcomponent="api_json"` key.
It helps you see requests made directly to the API. For example:
```json
@@ -401,6 +444,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/application_json.log` on Linux package installations.
- `/home/git/gitlab/log/application_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="application_json"` key.
It helps you discover events happening in your instance such as user creation
and project deletion. For example:
@@ -426,6 +471,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/integrations_json.log` on Linux package installations.
- `/home/git/gitlab/log/integrations_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="integrations_json"` key.
It contains information about [integration](../../user/project/integrations/_index.md)
activities, such as Jira, Asana, and irker services. It uses JSON format,
like this example:
@@ -465,6 +512,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/kubernetes.log` on Linux package installations.
- `/home/git/gitlab/log/kubernetes.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq pods under the `subcomponent="kubernetes"` key.
It logs information related to [certificate-based clusters](../../user/project/clusters/_index.md), such as connectivity errors. Each line contains JSON that can be ingested by services like Elasticsearch and Splunk.
## `git_json.log`
@@ -474,6 +523,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/git_json.log` on Linux package installations.
- `/home/git/gitlab/log/git_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq pods under the `subcomponent="git_json"` key.
GitLab has to interact with Git repositories, but in some rare cases
something can go wrong. If this happens, you need to know exactly what
happened. This log file contains all failed requests from GitLab to Git
@@ -510,6 +561,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/audit_json.log` on Linux package installations.
- `/home/git/gitlab/log/audit_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="audit_json"` key.
Changes to group or project settings and memberships (`target_details`)
are logged to this file. For example:
@@ -614,6 +667,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/sidekiq_client.log` on Linux package installations.
- `/home/git/gitlab/log/sidekiq_client.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Webservice pods under the `subcomponent="sidekiq_client"` key.
This file contains logging information about jobs before Sidekiq starts
processing them, for example, while they are being enqueued.
@@ -731,6 +786,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/importer.log` on Linux package installations.
- `/home/git/gitlab/log/importer.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq pods under the `subcomponent="importer"` key.
This file logs the progress of [project imports and migrations](../../user/project/import/_index.md).
## `exporter.log`
@@ -740,6 +797,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/exporter.log` on Linux package installations.
- `/home/git/gitlab/log/exporter.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="exporter"` key.
It logs the progress of the export process.
## `features_json.log`
@@ -749,6 +808,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/features_json.log` on Linux package installations.
- `/home/git/gitlab/log/features_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="features_json"` key.
Modification events for feature flags used in the development of GitLab
are recorded in this file. For example:
@@ -775,9 +836,11 @@ are recorded in this file. For example:
This file is located at:
- `/var/log/gitlab/gitlab-rails/ci_resource_group_json.log` on Linux package installations.
- `/var/log/gitlab/gitlab-rails/ci_resource_groups_json.log` on Linux package installations.
- `/home/git/gitlab/log/ci_resource_group_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="ci_resource_groups_json"` key.
It contains information about [resource group](../../ci/resource_groups/_index.md) acquisition. For example:
```json
@@ -807,6 +870,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/auth_json.log` on Linux package installations.
- `/home/git/gitlab/log/auth_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="auth_json"` key.
This file contains the JSON version of the logs in `auth.log`, for example:
```json
@@ -829,6 +894,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/graphql_json.log` on Linux package installations.
- `/home/git/gitlab/log/graphql_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="graphql_json"` key.
GraphQL queries are recorded in the file. For example:
```json
@@ -843,6 +910,13 @@ GraphQL queries are recorded in the file. For example:
{{< /history >}}
This file is located at:
- `/var/log/gitlab/gitlab-rails/clickhouse.log` on Linux package installations.
- `/home/git/gitlab/log/clickhouse.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="clickhouse"` key.
The `clickhouse.log` file logs information related to the
[ClickHouse database client](../../integration/clickhouse.md) in GitLab.
@@ -879,6 +953,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/web_hooks.log` on Linux package installations.
- `/home/git/gitlab/log/web_hooks.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq pods under the `subcomponent="web_hooks"` key.
Back-off, disablement, and re-enablement events for webhooks are recorded in this file. For example:
```json
@@ -928,11 +1004,14 @@ are generated in a location based on your installation method:
{{< /details >}}
Contains details of GitLab [Database Load Balancing](../postgresql/database_load_balancing.md).
This file is located at:
- `/var/log/gitlab/gitlab-rails/database_load_balancing.log` on Linux package installations.
- `/home/git/gitlab/log/database_load_balancing.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="database_load_balancing"` key.
## `zoekt.log`
{{< details >}}
@@ -954,6 +1033,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/zoekt.log` on Linux package installations.
- `/home/git/gitlab/log/zoekt.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="zoekt"` key.
## `elasticsearch.log`
{{< details >}}
@@ -964,11 +1045,15 @@ This file is located at:
{{< /details >}}
This file logs information related to the Elasticsearch Integration, including
errors during indexing or searching Elasticsearch. This file is located at:
errors during indexing or searching Elasticsearch.
This file is located at:
- `/var/log/gitlab/gitlab-rails/elasticsearch.log` on Linux package installations.
- `/home/git/gitlab/log/elasticsearch.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="elasticsearch"` key.
Each line contains JSON that can be ingested by services like Elasticsearch and Splunk.
Line breaks have been added to the following example line for clarity:
@@ -996,6 +1081,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/exceptions_json.log` on Linux package installations.
- `/home/git/gitlab/log/exceptions_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="exceptions_json"` key.
Each line contains JSON that can be ingested by Elasticsearch. For example:
```json
@@ -1022,6 +1109,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/service_measurement.log` on Linux package installations.
- `/home/git/gitlab/log/service_measurement.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="service_measurement"` key.
It contains a single structured log entry for each service execution,
with measurements such as the number of SQL calls, `execution_time`, `gc_stats`, and memory usage.
@@ -1040,15 +1129,12 @@ For example:
{{< /details >}}
Geo stores structured log messages in a `geo.log` file. For Linux package installations,
this file is at `/var/log/gitlab/gitlab-rails/geo.log`.
This file is located at:
For Helm chart installations, it's stored in the Sidekiq pod, at `/var/log/gitlab/geo.log`.
It can be read by either directly accessing the file, or by using `kubectl` to fetch the Sidekiq logs, and subsequently filtering the results by `"subcomponent"=="geo"`. The example below uses `jq` to grab only Geo logs:
- `/var/log/gitlab/gitlab-rails/geo.log` on Linux package installations.
- `/home/git/gitlab/log/geo.log` on self-compiled installations.
```shell
kubectl logs -l app=sidekiq --max-log-requests=50 | jq 'select(."subcomponent"=="geo")'
```
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="geo"` key.
This file contains information about when Geo attempts to sync repositories
and files. Each line in the file contains a separate JSON entry that can be
@@ -1069,6 +1155,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/update_mirror_service_json.log` on Linux package installations.
- `/home/git/gitlab/log/update_mirror_service_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq pods under the `subcomponent="update_mirror_service_json"` key.
This file contains information about LFS errors that occurred during project mirroring.
While we work to move other project mirroring errors into this log, the [general log](#productionlog)
can be used.
@@ -1131,6 +1219,8 @@ The log file is located at:
- `/var/log/gitlab/gitlab-rails/llm.log` on Linux package installations.
- `/home/git/gitlab/log/llm.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Webservice pods under the `subcomponent="llm"` key.
## `epic_work_item_sync.log`
{{< details >}}
@@ -1153,6 +1243,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/epic_work_item_sync.log` on Linux package installations.
- `/home/git/gitlab/log/epic_work_item_sync.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq and Webservice pods under the `subcomponent="epic_work_item_sync"` key.
## `secret_push_protection.log`
{{< details >}}
@@ -1175,6 +1267,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/secret_push_protection.log` on Linux package installations.
- `/home/git/gitlab/log/secret_push_protection.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Webservice pods under the `subcomponent="secret_push_protection"` key.
## Registry logs
For Linux package installations, container registry logs are in `/var/log/gitlab/registry/current`.
@@ -1246,6 +1340,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/product_usage_data.log` on Linux package installations.
- `/home/git/gitlab/log/product_usage_data.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Webservice pods under the `subcomponent="product_usage_data"` key.
It contains JSON-formatted logs of product usage events tracked through Snowplow. Each line in the file contains a separate JSON entry that can be ingested by services like Elasticsearch or Splunk. Line breaks were added to examples for legibility:
```json
@@ -1362,7 +1458,7 @@ GitLab also tracks [Prometheus metrics for Praefect](../gitaly/monitoring.md#mon
For Linux package installations, the backup log is located at `/var/log/gitlab/gitlab-rails/backup_json.log`.
For Helm chart installations, the backup log is stored in the Toolbox pod, at `/var/log/gitlab/backup_json.log`.
On Helm chart installations, the backup log is stored in the Toolbox pod, at `/var/log/gitlab/backup_json.log`.
This log is populated when a [GitLab backup is created](../backup_restore/_index.md). You can use this log to understand how the backup process performed.
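A minimal sketch for tailing it on a Helm chart installation (the `app=toolbox` selector is an assumption based on the chart's default labels; verify with `kubectl get pods`):
```shell
# Tail the backup log in the first Toolbox pod
kubectl exec "$(kubectl get pods -l app=toolbox -o name | head -n 1)" -- \
  tail /var/log/gitlab/backup_json.log
```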
@@ -1373,6 +1469,8 @@ This file is located at:
- `/var/log/gitlab/gitlab-rails/performance_bar_json.log` on Linux package installations.
- `/home/git/gitlab/log/performance_bar_json.log` on self-compiled installations.
On Helm chart installations, the logs are available on the Sidekiq pods under the `subcomponent="performance_bar_json"` key.
Performance bar statistics (currently only the duration of SQL queries) are recorded
in this file. For example:

View File

@@ -50,7 +50,7 @@ module Gitlab
This change was generated by
[gitlab-housekeeper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-housekeeper)
using the #{keep_class} keep.
#{generated_by} using the `#{keep_class}` keep.
To provide feedback on your experience with `gitlab-housekeeper` please create an issue with the
label ~"GitLab Housekeeper" and consider pinging the author of this keep.
@@ -89,6 +89,18 @@ module Gitlab
text[0...limit]
end
private
def generated_by
job_url = ENV['CI_JOB_URL']
if job_url
"in [CI](#{job_url})"
else
'locally'
end
end
end
end
end

View File

@@ -22,7 +22,6 @@ module Gitlab
target_branch:,
target_project_id:
)
existing_merge_request = get_existing_merge_request(
source_project_id: source_project_id,
source_branch: source_branch,
@@ -97,6 +96,19 @@ module Gitlab
end
end
def closed_merge_request_exists?(source_project_id:, source_branch:, target_branch:, target_project_id:)
data = request(:get, "/projects/#{target_project_id}/merge_requests", query: {
state: :closed,
source_branch: source_branch,
target_branch: target_branch,
source_project_id: source_project_id
})
return false if data.empty?
true
end
def get_existing_merge_request(source_project_id:, source_branch:, target_branch:, target_project_id:)
data = request(:get, "/projects/#{target_project_id}/merge_requests", query: {
state: :opened,

View File

@@ -54,48 +54,19 @@ module Gitlab
end
change.keep_class ||= keep_class
branch_name = git.create_branch(change)
add_standard_change_data(change)
if change.aborted? || !@filter_identifiers.matches_filters?(change.identifiers)
# At this point the keep has already run and edited files so we need to
# restore the local working copy. We could simply checkout all
# changed_files but this is very risky as it could mean losing work that
# cannot be recovered. Instead we commit all the work to the branch and
# move on without pushing the branch.
git.in_branch(branch_name) do
git.create_commit(change)
end
next if skip_change_if_necessary(change, branch_name)
if change.aborted?
@logger.puts "Skipping change as it is marked aborted."
else
@logger.puts "Skipping change: #{change.identifiers} due to not matching filter."
end
@logger.puts "Modified files have been committed to branch #{branch_name.yellowish}," \
"but will not be pushed."
@logger.puts
next
end
# If no merge request exists yet, create an empty one to allow keeps to use the web URL.
unless @dry_run
merge_request = get_existing_merge_request(branch_name) || create(change, branch_name)
change.mr_web_url = merge_request['web_url']
end
setup_merge_request(change, branch_name) unless @dry_run
git.in_branch(branch_name) do
Gitlab::Housekeeper::Substitutor.perform(change)
git.create_commit(change)
end
print_change_details(change, branch_name)
create(change, branch_name) unless @dry_run
mrs_created_count += 1
@@ -126,6 +97,35 @@ module Gitlab
change.labels << 'automation:gitlab-housekeeper-authored'
end
def skip_change_if_necessary(change, branch_name)
if change.aborted? || !@filter_identifiers.matches_filters?(change.identifiers) ||
(!@dry_run && has_closed_merge_request?(branch_name))
git.in_branch(branch_name) do
git.create_commit(change)
end
if change.aborted?
@logger.puts "Skipping change as it is marked aborted."
elsif !@filter_identifiers.matches_filters?(change.identifiers)
@logger.puts "Skipping change: #{change.identifiers} due to not matching filter."
else
@logger.puts "Skipping change as we have closed an MR for this branch #{branch_name}"
end
@logger.puts "Modified files have been committed to branch #{branch_name.yellowish}, " \
"but will not be pushed."
@logger.puts
return true
end
false
end
def setup_merge_request(change, branch_name)
merge_request = get_existing_merge_request(branch_name) || create(change, branch_name)
change.mr_web_url = merge_request['web_url']
end
def git
@git ||= ::Gitlab::Housekeeper::Git.new(logger: @logger, branch_from: @target_branch)
end
@@ -149,7 +149,7 @@ module Gitlab
@logger.puts
@logger.puts '=> Description:'
@logger.puts change.description
@logger.puts change.mr_description
@logger.puts
if change.labels.present? || change.assignees.present? || change.reviewers.present?
@@ -194,6 +194,15 @@ module Gitlab
)
end
def has_closed_merge_request?(branch_name)
gitlab_client.closed_merge_request_exists?(
source_project_id: housekeeper_fork_project_id,
source_branch: branch_name,
target_branch: @target_branch,
target_project_id: housekeeper_target_project_id
)
end
# We do not want to push code if the MR already has approvals as it will reset the approvals. Also we do not push
# if someone else has added commits already.
def self.should_push_code?(change, push_when_approved)

View File

@@ -4,11 +4,14 @@ require 'spec_helper'
RSpec.describe ::Gitlab::Housekeeper::Change do
let(:change) { described_class.new }
let(:job_url) { nil }
before do
change.title = 'The title'
change.description = 'The description'
change.keep_class = Object
stub_env 'CI_JOB_URL', job_url
end
describe '#initialize' do
@@ -48,13 +51,21 @@ RSpec.describe ::Gitlab::Housekeeper::Change do
This change was generated by
[gitlab-housekeeper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-housekeeper)
using the Object keep.
locally using the `Object` keep.
To provide feedback on your experience with `gitlab-housekeeper` please create an issue with the
label ~"GitLab Housekeeper" and consider pinging the author of this keep.
MARKDOWN
)
end
context 'when generated in CI' do
let(:job_url) { 'https://gitlab.com/namespace/project/-/jobs/123456789' }
it 'includes CI job URL' do
expect(change.mr_description).to include("in [CI](#{job_url}) using the `Object` keep")
end
end
end
describe '#update_required?' do
@@ -93,7 +104,7 @@ RSpec.describe ::Gitlab::Housekeeper::Change do
This change was generated by
[gitlab-housekeeper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-housekeeper)
using the Object keep.
locally using the `Object` keep.
To provide feedback on your experience with `gitlab-housekeeper` please create an issue with the
label ~"GitLab Housekeeper" and consider pinging the author of this keep.
@@ -118,7 +129,7 @@ RSpec.describe ::Gitlab::Housekeeper::Change do
This change was generated by
[gitlab-housekeeper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-housekeeper)
using the Object keep.
locally using the `Object` keep.
To provide feedback on your experience with `gitlab-housekeeper` please create an issue with the
label ~"GitLab Housekeeper" and consider pinging the author of this keep.
@@ -145,7 +156,7 @@ RSpec.describe ::Gitlab::Housekeeper::Change do
This change was generated by
[gitlab-housekeeper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-housekeeper)
using the Object keep.
locally using the `Object` keep.
To provide feedback on your experience with `gitlab-housekeeper` please create an issue with the
label ~"GitLab Housekeeper" and consider pinging the author of this keep.

View File

@@ -32,6 +32,8 @@ RSpec.describe ::Gitlab::Housekeeper::Git do
end
before do
stub_env 'CI_JOB_URL', nil
@previous_dir = Dir.pwd
Dir.chdir(repository_path)
@@ -93,7 +95,7 @@ RSpec.describe ::Gitlab::Housekeeper::Git do
This change was generated by
[gitlab-housekeeper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-housekeeper)
using the Object keep.
locally using the `Object` keep.
To provide feedback on your experience with `gitlab-housekeeper` please create an issue with the
label ~"GitLab Housekeeper" and consider pinging the author of this keep.

View File

@@ -75,6 +75,9 @@ RSpec.describe ::Gitlab::Housekeeper::Runner do
allow(gitlab_client).to receive(:non_housekeeper_changes)
.and_return([])
allow(gitlab_client).to receive(:closed_merge_request_exists?)
.and_return(false)
allow(::Gitlab::Housekeeper::Shell).to receive(:execute)
end
@@ -255,6 +258,55 @@ RSpec.describe ::Gitlab::Housekeeper::Runner do
expect(result).to eq(expected_result)
end
end
context 'when a closed merge request exists' do
let(:closed_mr_change) { create_change(identifiers: ['closed-mr-branch']) }
let(:fake_keep_with_closed_mr) { instance_double(Class) }
let(:closed_mr_git) { instance_double(::Gitlab::Housekeeper::Git) }
let(:closed_mr_gitlab_client) { instance_double(::Gitlab::Housekeeper::GitlabClient) }
before do
stub_env('HOUSEKEEPER_FORK_PROJECT_ID', '123')
stub_env('HOUSEKEEPER_TARGET_PROJECT_ID', '456')
fake_keep_instance = instance_double(::Gitlab::Housekeeper::Keep)
allow(fake_keep_with_closed_mr).to receive(:new).and_return(fake_keep_instance)
allow(fake_keep_instance).to receive(:each_change).and_yield(closed_mr_change)
allow(::Gitlab::Housekeeper::Git).to receive(:new).and_return(closed_mr_git)
allow(::Gitlab::Housekeeper::GitlabClient).to receive(:new).and_return(closed_mr_gitlab_client)
allow(closed_mr_git).to receive(:with_clean_state).and_yield
allow(closed_mr_git).to receive(:create_branch).with(closed_mr_change).and_return('closed-mr-branch')
allow(closed_mr_git).to receive(:in_branch).and_yield
allow(closed_mr_git).to receive(:create_commit)
allow(closed_mr_gitlab_client).to receive(:closed_merge_request_exists?)
.with(
source_project_id: '123',
source_branch: 'closed-mr-branch',
target_branch: 'master',
target_project_id: '456'
).and_return(true)
end
it 'skips the change when a closed MR exists' do
expect(closed_mr_gitlab_client).not_to receive(:get_existing_merge_request)
expect(closed_mr_gitlab_client).not_to receive(:create_or_update_merge_request)
expect(closed_mr_git).not_to receive(:push)
described_class.new(max_mrs: 1, keeps: [fake_keep_with_closed_mr]).run
end
it 'logs the skip message' do
logger = instance_double(::Gitlab::Housekeeper::Logger)
allow(::Gitlab::Housekeeper::Logger).to receive(:new).and_return(logger)
allow(logger).to receive(:puts)
expect(logger).to receive(:puts)
.with('Skipping change as we have closed an MR for this branch closed-mr-branch')
described_class.new(max_mrs: 1, keeps: [fake_keep_with_closed_mr]).run
end
end
end
end
# rubocop:enable RSpec/MultipleMemoizedHelpers

View File

@@ -477,8 +477,10 @@ module API
# Gitaly RPC doesn't support pagination, but we still can limit the number of requested records
# Example: per_page = 50, page = 3
# Limit will be set to 150 to capture enough records for Kaminari pagination to extract the right slice
limit = [per_page, Kaminari.config.max_per_page].min * page
# Limit will be set to 151 to capture enough records for Kaminari pagination to extract the right slice.
# 1 is added to the limit so that Kaminari knows there are more records and correctly sets the x-next-page
# and Link headers.
limit = ([per_page, Kaminari.config.max_per_page].min * page) + 1
args = {
type: declared_params[:type],
@@ -493,7 +495,9 @@ module API
refs = Kaminari.paginate_array(refs)
present paginate(refs), with: Entities::BasicRef
# Due to the limit applied above to capture just enough records, disable x-total, x-total-page, and "last" link
# in the response header. Without this, the response headers would contain incorrect and misleading values.
present paginate(refs, without_count: true), with: Entities::BasicRef
end
desc 'Post comment to commit' do
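To see the effect of the over-fetch above, you can inspect the pagination headers the endpoint returns (the host, token, project ID, and SHA below are placeholders): with `without_count` pagination, `X-Total` and `X-Total-Pages` should be absent, while `x-next-page` and the `Link` header indicate whether another page exists.
```shell
curl --silent --dump-header - --output /dev/null \
  --header "PRIVATE-TOKEN: <your-token>" \
  "https://gitlab.example.com/api/v4/projects/<id>/repository/commits/<sha>/refs?type=all&per_page=50"
```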

View File

@@ -55678,9 +55678,6 @@ msgstr ""
msgid "SecurityConfiguration|Security configuration"
msgstr ""
msgid "SecurityConfiguration|Security labels"
msgstr ""
msgid "SecurityConfiguration|Security testing"
msgstr ""
@@ -55705,9 +55702,6 @@ msgstr ""
msgid "SecurityConfiguration|Upgrade or start a free trial"
msgstr ""
msgid "SecurityConfiguration|Use security labels to classify projects by business context, security needs, and more. Categories are shared across your groups, enabling consistent and scalable classification of assets. These labels can help prioritize security efforts, filter dashboards, and apply consistent governance across projects."
msgstr ""
msgid "SecurityConfiguration|Using custom settings. You won't receive automatic updates on this variable. %{anchorStart}Restore to default%{anchorEnd}"
msgstr ""
@@ -55986,9 +55980,24 @@ msgstr ""
msgid "SecurityLabels|Cancel"
msgstr ""
msgid "SecurityLabels|Categories"
msgstr ""
msgid "SecurityLabels|Category details"
msgstr ""
msgid "SecurityLabels|Category locked"
msgstr ""
msgid "SecurityLabels|Color"
msgstr ""
msgid "SecurityLabels|Create category"
msgstr ""
msgid "SecurityLabels|Create label"
msgstr ""
msgid "SecurityLabels|Delete"
msgstr ""
@@ -56010,15 +56019,48 @@ msgstr ""
msgid "SecurityLabels|Edit security label"
msgstr ""
msgid "SecurityLabels|Limited edits allowed"
msgstr ""
msgid "SecurityLabels|Multiple selection"
msgstr ""
msgid "SecurityLabels|Name"
msgstr ""
msgid "SecurityLabels|Name is required"
msgstr ""
msgid "SecurityLabels|Security labels"
msgstr ""
msgid "SecurityLabels|Selection type"
msgstr ""
msgid "SecurityLabels|Single selection"
msgstr ""
msgid "SecurityLabels|Update label"
msgstr ""
msgid "SecurityLabels|Use security labels to categorize projects by business context, security needs, and more. Categories are shared across your groups, enabling consistent and scalable classification of assets. These labels can help prioritize security efforts, filter dashboards, and apply consistent governance across projects."
msgstr ""
msgid "SecurityLabels|View category settings and associated labels."
msgstr ""
msgid "SecurityLabels|View the labels available in this category"
msgstr ""
msgid "SecurityLabels|You cannot change the selection type after the category is created. To use a different selection type, create a new category."
msgstr ""
msgid "SecurityLabels|You cannot delete or edit this category. You cannot modify the labels."
msgstr ""
msgid "SecurityLabels|You cannot delete this category, but you can edit the labels."
msgstr ""
msgid "SecurityOrchestration| and "
msgstr ""

View File

@@ -1445,23 +1445,43 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
end
it 'applies per_page limit to Gitaly calls' do
expect_next_instance_of(Gitlab::Repositories::ContainingCommitFinder, project.repository, commit_id, limit: 50, type: 'all') do |finder|
expect_next_instance_of(Gitlab::Repositories::ContainingCommitFinder, project.repository, commit_id, limit: 51, type: 'all') do |finder|
expect(finder).to receive(:execute).and_call_original
end
get api(route, current_user), params: { type: 'all', per_page: 50 }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_limited_pagination_headers
expect(response.headers).not_to include('X-Total', 'X-Total-Pages')
expected_page_number = 2
expect(response).to include_offset_url_params_in_next_link(expected_page_number)
end
context 'when second page is requested' do
it 'fetches enough records to display the second page' do
expect_next_instance_of(Gitlab::Repositories::ContainingCommitFinder, project.repository, commit_id, limit: 100, type: 'all') do |finder|
expect_next_instance_of(Gitlab::Repositories::ContainingCommitFinder, project.repository, commit_id, limit: 101, type: 'all') do |finder|
expect(finder).to receive(:execute).and_call_original
end
get api(route, current_user), params: { type: 'all', per_page: 50, page: 2 }
expect(response).to have_gitlab_http_status(:ok)
end
it 'returns correct response headers for last page' do
expect_next_instance_of(Gitlab::Repositories::ContainingCommitFinder, project.repository, commit_id, limit: 101, type: 'all') do |finder|
expect(finder).to receive(:execute).and_call_original
end
get api(route, current_user), params: { type: 'all', per_page: 50, page: 2 }
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers).not_to include('X-Total', 'X-Total-Pages')
expect(response.headers['Link']).not_to include('rel="next"')
expect(response.headers['Link']).to include('rel="prev"')
expect(response.headers['x-next-page']).to eq('')
expect(response.headers['x-prev-page']).to include('1')
end
end
end
end

View File

@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe API::Users, :with_current_organization, :aggregate_failures, feature_category: :user_management do
include WorkhorseHelpers
include KeysetPaginationHelpers
include PaginationHelpers
include CryptoHelpers
let_it_be(:admin) { create(:admin) }

View File

@@ -1,6 +1,6 @@
# frozen_string_literal: true
module KeysetPaginationHelpers
module PaginationHelpers
def pagination_links(response)
link = response.headers['LINK']
return unless link

View File

@@ -12,8 +12,19 @@ RSpec::Matchers.define :include_limited_pagination_headers do |expected|
end
end
RSpec::Matchers.define :include_offset_url_params_in_next_link do |expected_page_number|
include PaginationHelpers
match do |actual|
expect(actual.headers).to include('Link')
params_for_next_page = pagination_params_from_next_url(actual)
expect(params_for_next_page['page']).to eq(expected_page_number.to_s)
end
end
RSpec::Matchers.define :include_keyset_url_params do |expected|
include KeysetPaginationHelpers
include PaginationHelpers
match do |actual|
params_for_next_page = pagination_params_from_next_url(actual)

View File

@@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.shared_examples 'an endpoint with keyset pagination' do |invalid_order: 'name', invalid_sort: 'asc'|
include KeysetPaginationHelpers
include PaginationHelpers
let(:keyset_params) { { pagination: 'keyset', per_page: 1 } }
let(:additional_params) { {} }

View File

@@ -2,11 +2,11 @@
require 'spec_helper'
RSpec.describe KeysetPaginationHelpers, feature_category: :api do
RSpec.describe PaginationHelpers, feature_category: :api do
include described_class
let(:headers) { { 'LINK' => %(<#{url}>; rel="#{rel}") } }
let(:response) { instance_double('HTTParty::Response', headers: headers) }
let(:response) { instance_double(HTTParty::Response, headers: headers) }
let(:rel) { 'next' }
let(:url) do
'http://127.0.0.1:3000/api/v4/projects/7/audit_eve' \