Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-07-12 15:20:09 +00:00
parent 19c89d68d0
commit 94c87f5bff
51 changed files with 503 additions and 103 deletions

View File

@@ -17,10 +17,26 @@ import PipelineArtifacts from '~/ci/pipelines_page/components/pipelines_artifact
import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate/tooltip_on_truncate.vue';
import mergeRequestEventTypeQuery from '../queries/merge_request_event_type.query.graphql';
import runPipelineMixin from '../mixins/run_pipeline';
export default {
name: 'MRWidgetPipeline',
apollo: {
mergeRequestEventType: {
query: mergeRequestEventTypeQuery,
variables() {
return {
projectPath: this.targetProjectFullPath,
iid: `${this.iid}`,
};
},
skip() {
return !this.retargeted;
},
update: (d) => d.project?.mergeRequest?.pipelines?.nodes?.[0]?.mergeRequestEventType,
},
},
components: {
CiIcon,
GlLink,
@@ -93,15 +109,15 @@ export default {
required: false,
default: false,
},
detatchedPipeline: {
targetProjectFullPath: {
type: String,
required: false,
default: null,
required: true,
},
},
data() {
return {
isCreatingPipeline: false,
mergeRequestEventType: null,
};
},
computed: {
@@ -200,7 +216,7 @@ export default {
}}
</p>
<gl-button
v-if="detatchedPipeline"
v-if="mergeRequestEventType"
category="tertiary"
variant="confirm"
size="small"

View File

@@ -93,7 +93,7 @@ export default {
:retargeted="mr.retargeted"
:target-project-id="mr.targetProjectId"
:iid="mr.iid"
:detatched-pipeline="mr.detatchedPipeline"
:target-project-full-path="mr.targetProjectFullPath"
/>
<template #footer>
<div v-if="mr.exposedArtifactsPath" class="js-exposed-artifacts">

View File

@@ -23,12 +23,6 @@ query getState($projectPath: ID!, $iid: String!) {
status
warnings
}
pipelines(first: 1) {
nodes {
id
mergeRequestEventType
}
}
shouldBeRebased
sourceBranchExists
state

View File

@@ -0,0 +1,14 @@
query mergeRequestEventTypeQuery($projectPath: ID!, $iid: String!) {
project(fullPath: $projectPath) {
id
mergeRequest(iid: $iid) {
id
pipelines(first: 1) {
nodes {
id
mergeRequestEventType
}
}
}
}
}

View File

@@ -182,7 +182,6 @@ export default class MergeRequestStore {
setGraphqlData(project) {
const { mergeRequest } = project;
const pipeline = mergeRequest.headPipeline;
const pipelines = mergeRequest.pipelines?.nodes;
this.updateStatusState(mergeRequest.state);
@@ -200,7 +199,6 @@
this.ciStatus = `${this.ciStatus}-with-warnings`;
}
this.detatchedPipeline = pipelines.length ? pipelines[0].mergeRequestEventType : null;
this.commitsCount = mergeRequest.commitCount;
this.branchMissing =
mergeRequest.detailedMergeStatus !== 'NOT_OPEN' &&

View File

@@ -191,7 +191,7 @@
},
},
i18n: {
title: s__('WorkItem|Linked Items'),
title: s__('WorkItem|Linked items'),
fetchError: s__('WorkItem|Something went wrong when fetching items. Please refresh this page.'),
emptyStateMessage: s__(
"WorkItem|Link items together to show that they're related or that one is blocking others.",

View File

@@ -10,10 +10,10 @@ module Types
value: :idle,
alpha: { milestone: '15.7' }
value 'RUNNING',
description: 'Runner is executing jobs.',
value: :running,
alpha: { milestone: '15.7' }
value 'ACTIVE',
description: 'Runner is busy.',
value: :active,
alpha: { milestone: '17.2' }
end
end
end

View File

@@ -55,7 +55,7 @@ module Types
statuses = ::Ci::RunnerManager.id_in(runner_manager_ids).with_executing_builds.index_by(&:id)
runner_manager_ids.each do |runner_manager_id|
loader.call(runner_manager_id, statuses[runner_manager_id] ? :running : :idle)
loader.call(runner_manager_id, statuses[runner_manager_id] ? :active : :idle)
end
end
end

View File

@@ -158,7 +158,7 @@ module Types
statuses = ::Ci::Runner.id_in(runner_ids).with_executing_builds.index_by(&:id)
runner_ids.each do |runner_id|
loader.call(runner_id, statuses[runner_id] ? :running : :idle)
loader.call(runner_id, statuses[runner_id] ? :active : :idle)
end
end
end

View File

@@ -13,7 +13,7 @@ module ProjectsHelper
end
def link_to_project(project)
link_to namespace_project_path(namespace_id: project.namespace, id: project), title: h(project.name), class: 'gl-link gl-text-truncate' do
link_to namespace_project_path(namespace_id: project.namespace, id: project), title: h(project.name), class: 'gl-link gl-truncate' do
title = content_tag(:span, project.name, class: 'project-name')
if project.namespace
@@ -56,7 +56,7 @@
content_tag(:span, username, name_tag_options)
end
def link_to_member(project, author, opts = {}, &block)
def link_to_member(_project, author, opts = {}, &block)
default_opts = { avatar: true, name: true, title: ":name" }
opts = default_opts.merge(opts)
@@ -394,7 +394,8 @@
end
def show_terraform_banner?(project)
Feature.enabled?(:show_terraform_banner, type: :ops) && project.repository_languages.with_programming_language('HCL').exists? && project.terraform_states.empty?
Feature.enabled?(:show_terraform_banner, type: :ops) &&
project.repository_languages.with_programming_language('HCL').exists? && project.terraform_states.empty?
end
def project_permissions_panel_data(project)

View File

@@ -7,18 +7,11 @@ module WorkItems
private
def prepare_params(params: {}, permitted_params: [])
clear_label_params(params) if new_type_excludes_widget?
return if params.blank?
return unless has_permission?(:set_work_item_metadata)
service_params.merge!(params.slice(*permitted_params))
end
def clear_label_params(params)
params[:remove_label_ids] = @work_item.labels.map(&:id)
params[:add_label_ids] = []
end
end
end
end

View File

@@ -5,8 +5,17 @@ module WorkItems
module LabelsService
class UpdateService < BaseService
def prepare_update_params(params: {})
clear_label_params(params) if new_type_excludes_widget?
prepare_params(params: params, permitted_params: %i[add_label_ids remove_label_ids])
end
private
def clear_label_params(params)
params[:remove_label_ids] = @work_item.labels.map(&:id)
params[:add_label_ids] = []
end
end
end
end
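The behavioral effect of this move is that label clean-up on a type change now runs only on the update path. A minimal Ruby sketch of what `clear_label_params` does to the params hash (label ids are hypothetical):

```ruby
# Illustration only: a work item carrying labels 7 and 9 is being changed
# to a type that excludes the labels widget.
params = { add_label_ids: [3] }

params[:remove_label_ids] = [7, 9] # @work_item.labels.map(&:id)
params[:add_label_ids]    = []     # requested additions are dropped

params # => { add_label_ids: [], remove_label_ids: [7, 9] }
```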

View File

@@ -1,8 +1,8 @@
- page_title _("Background Jobs")
- page_title _("Background jobs")
- sidekiq_link = link_to('', 'https://sidekiq.org/', target: '_blank', rel: 'noopener noreferrer')
- page_description = safe_format(_('GitLab uses %{link_start}Sidekiq%{link_end} to process background jobs'), tag_pair(sidekiq_link, :link_start, :link_end))
= render ::Layouts::PageHeadingComponent.new(_('Background Jobs'),
= render ::Layouts::PageHeadingComponent.new(_('Background jobs'),
description: page_description,
options: { data: { event_tracking_load: 'true', event_tracking: 'view_admin_background_jobs_pageload' } })

View File

@@ -36,13 +36,13 @@
= _('You can also upload existing files from your computer using the instructions below.')
.git-empty.js-git-empty
%h5= _('Git global setup')
%pre.gl-bg-gray-10
%pre.code.js-syntax-highlight
:preserve
git config --global user.name "#{h git_user_name}"
git config --global user.email "#{h git_user_email}"
%h5= _('Create a new repository')
%pre.gl-bg-gray-10
%pre.code.js-syntax-highlight
:preserve
git clone #{ content_tag(:span, default_url_to_repo, class: 'js-clone')}
cd #{h @project.path}
@@ -55,7 +55,7 @@
git push --set-upstream origin #{h escaped_default_branch_name }
%h5= _('Push an existing folder')
%pre.gl-bg-gray-10
%pre.code.js-syntax-highlight
:preserve
cd existing_folder
git init --initial-branch=#{h escaped_default_branch_name}
@@ -67,7 +67,7 @@
git push --set-upstream origin #{h escaped_default_branch_name }
%h5= _('Push an existing Git repository')
%pre.gl-bg-gray-10
%pre.code.js-syntax-highlight
:preserve
cd existing_repo
git remote rename origin old-origin
@@ -76,6 +76,5 @@
%span><
git push --set-upstream origin --all
git push --set-upstream origin --tags
.project-page-layout-sidebar.js-show-on-project-root.gl-mt-5
= render "sidebar"

View File

@@ -264,6 +264,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:ci_click_house_finished_pipelines_sync_cron
:worker_name: Ci::ClickHouse::FinishedPipelinesSyncCronWorker
:feature_category: :fleet_visibility
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:ci_delete_unit_tests
:worker_name: Ci::DeleteUnitTestsWorker
:feature_category: :code_testing
@@ -2820,6 +2829,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: ci_click_house_finished_pipelines_sync
:worker_name: Ci::ClickHouse::FinishedPipelinesSyncWorker
:feature_category: :fleet_visibility
:has_external_dependencies: true
:urgency: :throttled
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: ci_delete_objects
:worker_name: Ci::DeleteObjectsWorker
:feature_category: :continuous_integration

View File

@@ -0,0 +1,27 @@
# frozen_string_literal: true
module Ci
module ClickHouse
class FinishedPipelinesSyncCronWorker
include ApplicationWorker
idempotent!
queue_namespace :cronjob
data_consistency :delayed
feature_category :fleet_visibility
loggable_arguments 1
def perform(*args)
return unless ::Ci::ClickHouse::DataIngestion::FinishedPipelinesSyncService.enabled?
total_workers = args.first || 1
total_workers.times do |worker_index|
FinishedPipelinesSyncWorker.perform_async(worker_index, total_workers)
end
nil
end
end
end
end
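The cron worker fans out one `FinishedPipelinesSyncWorker` per slice of the pipeline id space. A sketch of the partitioning arithmetic, mirroring the bounds math in the worker spec further down (the partition count of 32 is an assumption for illustration; the real value is `FinishedPipelinesSyncService::PIPELINE_ID_PARTITIONS`):

```ruby
# Each worker owns the pipeline ids whose (pipeline_id % partition_count)
# falls in [lower_bound, upper_bound), so the slices tile the id space exactly once.
partition_count = 32 # assumed value of PIPELINE_ID_PARTITIONS
total_workers = 3

slices = total_workers.times.map do |worker_index|
  lower_bound = worker_index * partition_count / total_workers
  upper_bound = (worker_index + 1) * partition_count / total_workers
  (lower_bound...upper_bound)
end

slices # => [0...10, 10...21, 21...32]
```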

View File

@@ -0,0 +1,25 @@
# frozen_string_literal: true
module Ci
module ClickHouse
class FinishedPipelinesSyncWorker
include ApplicationWorker
include ClickHouseWorker
idempotent!
data_consistency :delayed
urgency :throttled
feature_category :fleet_visibility
loggable_arguments 1, 2
def perform(worker_index = 0, total_workers = 1)
response = ::Ci::ClickHouse::DataIngestion::FinishedPipelinesSyncService.new(
worker_index: worker_index, total_workers: total_workers
).execute
result = response.success? ? response.payload : response.deconstruct_keys(%i[message reason])
log_extra_metadata_on_done(:result, result)
end
end
end
end

View File

@@ -7,7 +7,6 @@ module ClickHouse
idempotent!
data_consistency :delayed
worker_has_external_dependencies! # the worker interacts with a ClickHouse database
feature_category :compliance_management
deduplicate :until_executed, including_scheduled: true # The second job can be skipped if first job hasn't run yet.

View File

@@ -8,7 +8,6 @@ module ClickHouse
idempotent!
queue_namespace :cronjob
data_consistency :delayed
worker_has_external_dependencies! # the worker interacts with a ClickHouse database
feature_category :compliance_management
def perform

View File

@@ -25,6 +25,7 @@ module ClickHouseWorker
included do
click_house_migration_lock(ClickHouse::MigrationSupport::ExclusiveLock::DEFAULT_CLICKHOUSE_WORKER_TTL)
worker_has_external_dependencies! # the worker interacts with a ClickHouse database
pause_control :click_house_migration
end
end
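With this line in the concern, the per-worker `worker_has_external_dependencies!` declarations removed from the two ClickHouse workers above become redundant; including the concern is enough. A sketch with a hypothetical worker name, relying only on behavior exercised by the specs at the end of this commit:

```ruby
# Hypothetical worker inside the GitLab codebase: including ClickHouseWorker
# now implies both the pause control and the external-dependencies flag.
class ExampleClickHouseWorker
  include ApplicationWorker
  include ClickHouseWorker
end

ExampleClickHouseWorker.worker_has_external_dependencies? # => true
ExampleClickHouseWorker.get_pause_control                 # => :click_house_migration
```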

View File

@@ -603,6 +603,10 @@ production: &base
ci_runners_stale_machines_cleanup_worker:
cron: "36 * * * *"
# Periodically queue syncing of finished pipelines from p_ci_finished_pipeline_ch_sync_events to ClickHouse
ci_click_house_finished_pipelines_sync_worker:
cron: "*/4 * * * *"
# GitLab EE only jobs. These jobs are automatically enabled for an EE
# installation, and ignored for a CE installation.
ee_cron_jobs:

View File

@@ -710,6 +710,10 @@ Settings.cron_jobs['performance_bar_stats']['job_class'] = 'GitlabPerformanceBar
Settings.cron_jobs['ci_catalog_resources_aggregate_last30_day_usage_worker'] ||= {}
Settings.cron_jobs['ci_catalog_resources_aggregate_last30_day_usage_worker']['cron'] ||= '*/4 * * * *'
Settings.cron_jobs['ci_catalog_resources_aggregate_last30_day_usage_worker']['job_class'] = 'Ci::Catalog::Resources::AggregateLast30DayUsageWorker'
Settings.cron_jobs['ci_click_house_finished_pipelines_sync_worker'] ||= {}
Settings.cron_jobs['ci_click_house_finished_pipelines_sync_worker']['cron'] ||= '*/4 * * * *'
Settings.cron_jobs['ci_click_house_finished_pipelines_sync_worker']['args'] ||= [1]
Settings.cron_jobs['ci_click_house_finished_pipelines_sync_worker']['job_class'] = 'Ci::ClickHouse::FinishedPipelinesSyncCronWorker'
Gitlab.ee do
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= {}

View File

@@ -169,6 +169,8 @@
- 1
- - ci_cancel_redundant_pipelines
- 1
- - ci_click_house_finished_pipelines_sync
- 1
- - ci_delete_objects
- 1
- - ci_initialize_pipelines_iid_sequence

View File

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class EnsureUniqueIdForPCiJobAnnotation < Gitlab::Database::Migration[2.2]
include Gitlab::Database::PartitioningMigrationHelpers::UniquenessHelpers
milestone '17.3'
TABLE_NAME = :p_ci_job_annotations
SEQ_NAME = :p_ci_job_annotations_id_seq
def up
ensure_unique_id(TABLE_NAME, seq: SEQ_NAME)
end
def down
revert_ensure_unique_id(TABLE_NAME, seq: SEQ_NAME)
end
end

View File

@@ -0,0 +1 @@
abb93f7fc389219ea418d08c3c3cbb562d78e67e58971b56f10b65d8a096a379

View File

@@ -49,6 +49,19 @@ RETURN NEW;
END
$$;
CREATE FUNCTION assign_p_ci_job_annotations_id_value() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."id" IS NOT NULL THEN
RAISE WARNING 'Manually assigning ids is not allowed, the value will be ignored';
END IF;
NEW."id" := nextval('p_ci_job_annotations_id_seq'::regclass);
RETURN NEW;
END
$$;
CREATE FUNCTION assign_p_ci_job_artifacts_id_value() RETURNS trigger
LANGUAGE plpgsql
AS $$
@@ -31708,6 +31721,8 @@ CREATE TRIGGER assign_p_ci_builds_execution_configs_id_trigger BEFORE INSERT ON
CREATE TRIGGER assign_p_ci_builds_id_trigger BEFORE INSERT ON p_ci_builds FOR EACH ROW EXECUTE FUNCTION assign_p_ci_builds_id_value();
CREATE TRIGGER assign_p_ci_job_annotations_id_trigger BEFORE INSERT ON p_ci_job_annotations FOR EACH ROW EXECUTE FUNCTION assign_p_ci_job_annotations_id_value();
CREATE TRIGGER assign_p_ci_job_artifacts_id_trigger BEFORE INSERT ON p_ci_job_artifacts FOR EACH ROW EXECUTE FUNCTION assign_p_ci_job_artifacts_id_value();
CREATE TRIGGER assign_p_ci_pipeline_variables_id_trigger BEFORE INSERT ON p_ci_pipeline_variables FOR EACH ROW EXECUTE FUNCTION assign_p_ci_pipeline_variables_id_value();

View File

@@ -446,9 +446,9 @@ The **System information** page provides the following statistics:
These statistics are updated only when you go to the **System information** page, or you refresh the page in your browser.
### Background Jobs
### Background jobs
The **Background Jobs** page displays the Sidekiq dashboard. Sidekiq is used by GitLab to
The **Background jobs** page displays the Sidekiq dashboard. Sidekiq is used by GitLab to
perform processing in the background.
The Sidekiq dashboard consists of the following elements:

View File

@@ -89,14 +89,14 @@ To prepare the new server:
1. Disable periodic background jobs:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. Under the Sidekiq dashboard, select **Cron** tab and then
**Disable All**.
1. Wait for the currently running CI/CD jobs to finish, or accept that jobs that have not completed may be lost.
To view jobs currently running, on the left sidebar, select **Overviews > Jobs**,
and then select **Running**.
1. Wait for Sidekiq jobs to finish:
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. Under the Sidekiq dashboard, select **Queues** and then **Live Poll**.
Wait for **Busy** and **Enqueued** to drop to 0.
These queues contain work that has been submitted by your users;
@@ -200,7 +200,7 @@ to the new environment.
1. [Restore the GitLab backup](restore_gitlab.md).
1. Verify that the Redis database restored correctly:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. Under the Sidekiq dashboard, verify that the numbers
match with what was shown on the old server.
1. While still under the Sidekiq dashboard, select **Cron** and then **Enable All**

View File

@@ -205,7 +205,7 @@ be disabled on the **primary** site:
1. Enable [maintenance mode](../../maintenance_mode/index.md) on the **primary** site.
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **Monitoring > Background Jobs**.
1. Select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Cron**.
1. Select `Disable All` to disable non-Geo periodic background jobs.
1. Select `Enable` for the following cronjobs:
@@ -224,7 +224,7 @@ be disabled on the **primary** site:
final replication process now.
1. On the **primary** site:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Queues**, and wait for all queues except
those with `geo` in the name to drop to 0.
These queues contain work that has been submitted by your users; failing over
@@ -239,7 +239,7 @@ be disabled on the **primary** site:
1. On the **secondary** site:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Queues**, and wait for all the `geo`
queues to drop to 0 queued and 0 running jobs.
1. [Run an integrity check](../../raketasks/check.md) to verify the integrity

View File

@@ -109,7 +109,7 @@ follow these steps to avoid unnecessary data loss:
trigger the final replication process now.
1. On the **primary** site:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Queues**, and wait for all queues except
those with `geo` in the name to drop to 0.
These queues contain work that has been submitted by your users; failing over
@@ -124,7 +124,7 @@ follow these steps to avoid unnecessary data loss:
1. On the **secondary** site:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Queues**, and wait for all the `geo`
queues to drop to 0 queued and 0 running jobs.
1. [Run an integrity check](../../../raketasks/check.md) to verify the integrity

View File

@@ -120,7 +120,7 @@ follow these steps to avoid unnecessary data loss:
1. On the **primary** site:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Cron**.
1. Select `Disable All` to disable any non-Geo periodic background jobs.
1. Select `Enable` for the `geo_sidekiq_cron_config_worker` cron job.
@@ -138,7 +138,7 @@
trigger the final replication process now.
1. On the **primary** site:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Queues**, and wait for all queues except
those with `geo` in the name to drop to 0.
These queues contain work that has been submitted by your users; failing over
@@ -153,7 +153,7 @@ follow these steps to avoid unnecessary data loss:
1. On the **secondary** site:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Queues**, and wait for all the `geo`
queues to drop to 0 queued and 0 running jobs.
1. [Run an integrity check](../../../raketasks/check.md) to verify the integrity

View File

@@ -182,7 +182,7 @@ you should disable all cron jobs except for those related to Geo.
To monitor queues and disable jobs:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **Monitoring > Background Jobs**.
1. Select **Monitoring > Background jobs**.
1. In the Sidekiq dashboard, select **Cron** and disable jobs individually or all at once by selecting **Disable All**.
### Incident management

View File

@@ -46,7 +46,7 @@ When you use self-hosted models, you:
You can connect supported models to LLM features. Model-specific prompts
and GitLab Duo feature support is provided by the self-hosted models feature. For
more information about this offering, see [subscriptions](../../subscriptions/self_managed/index.md).
more information about this offering, see [subscriptions](../../subscriptions/self_managed/index.md) and the [Blueprint](https://handbook.gitlab.com/handbook/engineering/architecture/design-documents/custom_models/).
## Prerequisites

View File

@@ -96,23 +96,46 @@ sudo mkdir -p /srv/gitlab-agw
If you're running Docker with a user other than `root`, ensure appropriate
permissions have been granted to that directory.
#### Optional: Download documentation index
To improve results when asking GitLab Duo Chat questions about GitLab, you can
index GitLab documentation and provide it as a file to the AI Gateway.
To index the documentation in your local installation, run:
```shell
pip install requests langchain langchain_text_splitters
python3 scripts/custom_models/create_index.py -o <path_to_created_index/docs.db>
```
This creates a file `docs.db` at the specified path.
You can also create an index for a specific GitLab version:
```shell
python3 scripts/custom_models/create_index.py --version_tag="{gitlab-version}"
```
#### Find the AI Gateway release
In a production environment, you should set your deployment to a specific
GitLab AI Gateway release. Find the release to use in [GitLab AI Gateway releases](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/releases), for example:
```shell
docker run -p 5000:500 -e AIGW_CUSTOM_MODELS__ENABLED=true registry.gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/model-gateway:v1.4.0`
docker run -e AIGW_CUSTOM_MODELS__ENABLED=true \
-v path/to/created/index/docs.db:/app/tmp/docs.db \
-e AIGW_FASTAPI__OPENAPI_URL="/openapi.json" \
-e AIGW_AUTH__BYPASS_EXTERNAL=true \
-e AIGW_FASTAPI__DOCS_URL="/docs" \
-e AIGW_FASTAPI__API_PORT=5052 \
registry.gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/model-gateway:v1.11.0
```
To set your deployment to the latest stable release, use the `latest` tag to run the latest stable release:
To set your deployment to the latest stable release, use the `latest` tag to run the latest stable release.
```shell
docker run -p 5000:500 -e AIGW_CUSTOM_MODELS__ENABLED=true registry.gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/model-gateway:latest`
```
NOTE:
We do not yet support multi-arch image, only `linux/amd64`. If you try to run this on Apple chip, adding `--platform linux/amd64` to the `docker run` command will help.
The arguments `AIGW_FASTAPI__OPENAPI_URL` and `AIGW_FASTAPI__DOCS_URL` are not
mandatory, but are useful for debugging. From the host, accessing `http://localhost:5052/docs`
should open the AI Gateway API documentation.
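A quick way to confirm those endpoints respond, assuming the gateway is reachable on `localhost:5052` as described above (a sketch using only Ruby's standard library):

```ruby
# Probe the AI Gateway's FastAPI docs endpoint from the host.
require 'net/http'

response = Net::HTTP.get_response(URI('http://localhost:5052/docs'))
puts response.code # "200" indicates the gateway is serving its API documentation
```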
### Install by using Docker Engine
@@ -172,7 +195,13 @@ To upgrade the AI Gateway, download the newest Docker image tag.
1. Pull the new image:
```shell
docker run -p 5000:500 -e AIGW_CUSTOM_MODELS__ENABLED=true registry.gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/model-gateway:latest
docker run -e AIGW_CUSTOM_MODELS__ENABLED=true \
-v path/to/created/index/docs.db:/app/tmp/docs.db \
-e AIGW_FASTAPI__OPENAPI_URL="/openapi.json" \
-e AIGW_AUTH__BYPASS_EXTERNAL=true \
-e AIGW_FASTAPI__DOCS_URL="/docs" \
-e AIGW_FASTAPI__API_PORT=5052 \
registry.gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/model-gateway:v1.11.0
```
1. Ensure that the environment variables are all set correctly
@@ -180,3 +209,21 @@ To upgrade the AI Gateway, download the newest Docker image tag.
## Alternative installation methods
For information on alternative ways to install the AI Gateway, see [issue 463773](https://gitlab.com/gitlab-org/gitlab/-/issues/463773).
## Troubleshooting
### The image's platform does not match the host
When [finding the AI Gateway release](#find-the-ai-gateway-release), you might get an error that states `The requested images platform (linux/amd64) does not match the detected host`.
To work around this error, add `--platform linux/amd64` to the `docker run` command:
```shell
docker run -e AIGW_CUSTOM_MODELS__ENABLED=true --platform linux/amd64 \
-v path/to/created/index/docs.db:/app/tmp/docs.db \
-e AIGW_FASTAPI__OPENAPI_URL="/openapi.json" \
-e AIGW_AUTH__BYPASS_EXTERNAL=true \
-e AIGW_FASTAPI__DOCS_URL="/docs" \
-e AIGW_FASTAPI__API_PORT=5052 \
registry.gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/model-gateway:v1.11.0
```

View File

@@ -49,7 +49,7 @@ to all available queues:
To view the Sidekiq processes in GitLab:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **Monitoring > Background Jobs**.
1. Select **Monitoring > Background jobs**.
## Concurrency

View File

@@ -34066,8 +34066,8 @@ Runner cloud provider.
| Value | Description |
| ----- | ----------- |
| <a id="cirunnerjobexecutionstatusactive"></a>`ACTIVE` **{warning-solid}** | **Introduced** in GitLab 17.2. **Status**: Experiment. Runner is busy. |
| <a id="cirunnerjobexecutionstatusidle"></a>`IDLE` **{warning-solid}** | **Introduced** in GitLab 15.7. **Status**: Experiment. Runner is idle. |
| <a id="cirunnerjobexecutionstatusrunning"></a>`RUNNING` **{warning-solid}** | **Introduced** in GitLab 15.7. **Status**: Experiment. Runner is executing jobs. |
### `CiRunnerMembershipFilter`

View File

@@ -347,7 +347,7 @@ To enable advanced search:
```
1. Optional. Monitor the status of background jobs.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the left sidebar, select **Monitoring > Background jobs**.
1. On the Sidekiq dashboard, select **Queues** and wait for the `elastic_commit_indexer`
and `elastic_wiki_indexer` queues to drop to `0`.
These queues contain jobs to index code and wiki data for groups and projects.
@@ -972,7 +972,7 @@ due to large volumes of data being indexed, follow these steps:
```
This enqueues a Sidekiq job for each project that needs to be indexed.
You can view the jobs in the Admin Area under **Monitoring > Background Jobs > Queues Tab**
You can view the jobs in the Admin Area under **Monitoring > Background jobs > Queues Tab**
and select `elastic_commit_indexer`, or you can query indexing status using a Rake task:
```shell

View File

@@ -313,6 +313,8 @@ With this configuration, users will have the access to each GitLab Pages deploym
When using [environments](../../../ci/environments/index.md) for pages, all pages environments are
listed on the project environment list.
You can also [group similar environments](../../../ci/environments/index.md#group-similar-environments) together.
### Deployments deletion
#### Auto-clean

View File

@@ -24,10 +24,24 @@ Write code more efficiently by using generative AI to suggest code while you're
With GitLab Duo Code Suggestions, you get:
- Code completion, which suggests completions to the current line you are typing.
- Code completion, which suggests completions to the current line you are
typing. Code completion is used in most situations to quickly complete one
or a few lines of code.
- Code generation, which generates code based on a natural language code
comment block. Write a comment like `# Type more here`, then press <kbd>Enter</kbd> to generate
code based on the context of your comment and the rest of your code.
comment block. Write a comment like `# check if code suggestions are
enabled for current user`, then press <kbd>Enter</kbd> to generate code based
on the context of your comment and the rest of your code.
Code generation requests are slower than code completion requests, but provide
more accurate responses because:
- A larger LLM is used.
- Additional context is sent in the request, for example,
the libraries used by the project.
Code generation is used when the:
- User writes a comment and hits <kbd>Enter</kbd>.
- File being edited is less than five lines of code.
- User enters an empty function or method.
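As an illustration of the comment-driven flow described above, in a Ruby file the interaction might look like the following; the suggested body is hypothetical, since actual suggestions vary with the model and the surrounding code:

```ruby
# check if code suggestions are enabled for current user
# Pressing <Enter> after the comment above triggers code generation.
# One plausible suggestion (not a fixed output):
def code_suggestions_enabled?(current_user)
  return false unless current_user

  current_user.code_suggestions_enabled?
end
```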
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
[View a click-through demo](https://gitlab.navattic.com/code-suggestions).

View File

@@ -42,7 +42,7 @@ module Gitlab
username: username_and_email_generator.username,
email: username_and_email_generator.email
) do |u|
u.assign_personal_namespace(Organizations::Organization.default_organization)
u.assign_personal_namespace(root_ancestor.organization)
end
end

View File

@@ -51,7 +51,7 @@ module Sidebars
def background_jobs_menu_item
::Sidebars::MenuItem.new(
title: _('Background Jobs'),
title: _('Background jobs'),
link: admin_background_jobs_path,
active_routes: { controller: 'background_jobs' },
item_id: :background_jobs

View File

@@ -7858,10 +7858,10 @@ msgstr ""
msgid "Background Color"
msgstr ""
msgid "Background Jobs"
msgid "Background color"
msgstr ""
msgid "Background color"
msgid "Background jobs"
msgstr ""
msgid "Background migrations"
@@ -60295,10 +60295,10 @@ msgstr ""
msgid "WorkItem|Link items together to show that they're related or that one is blocking others."
msgstr ""
msgid "WorkItem|Linked Items"
msgid "WorkItem|Linked item removed"
msgstr ""
msgid "WorkItem|Linked item removed"
msgid "WorkItem|Linked items"
msgstr ""
msgid "WorkItem|Magenta"

View File

@@ -49,7 +49,6 @@ describe('MrWidgetPipelineContainer', () => {
retargeted: false,
targetProjectId: 1,
iid: 1,
detatchedPipeline: 'DETATCHED',
});
});

View File

@@ -1,10 +1,12 @@
import { nextTick } from 'vue';
import Vue, { nextTick } from 'vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { trimText } from 'helpers/text_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
import { createAlert } from '~/alert';
@@ -13,13 +15,17 @@ import MRWidgetPipelineComponent from '~/vue_merge_request_widget/components/mr_
import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import { SUCCESS } from '~/vue_merge_request_widget/constants';
import { localeDateFormat } from '~/lib/utils/datetime/locale_dateformat';
import mergeRequestEventTypeQuery from '~/vue_merge_request_widget/queries/merge_request_event_type.query.graphql';
import mockData from '../mock_data';
jest.mock('~/alert');
jest.mock('~/api');
Vue.use(VueApollo);
describe('MRWidgetPipeline', () => {
let wrapper;
let mergeRequestEventTypeQueryMock;
const defaultProps = {
pipeline: mockData.pipeline,
@@ -29,6 +35,7 @@
ciTroubleshootingDocsPath: 'ci-help',
targetProjectId: 1,
iid: 1,
targetProjectFullPath: 'gitlab-org/gitlab',
};
const ciErrorMessage =
@@ -56,16 +63,25 @@
const mockArtifactsRequest = () => new MockAdapter(axios).onGet().reply(HTTP_STATUS_OK, []);
const createWrapper = (props = {}, mountFn = shallowMount) => {
const apolloProvider = createMockApollo([
[mergeRequestEventTypeQuery, mergeRequestEventTypeQueryMock],
]);
wrapper = extendedWrapper(
mountFn(MRWidgetPipelineComponent, {
propsData: {
...defaultProps,
...props,
},
apolloProvider,
}),
);
};
afterEach(() => {
mergeRequestEventTypeQueryMock = null;
});
it('should render CI error if there is a pipeline, but no status', () => {
createWrapper({ ciStatus: null }, mount);
expect(findCIErrorMessage().text()).toBe(ciErrorMessage);
@@ -327,11 +343,24 @@
describe('when merge request is retargeted', () => {
describe('when last pipeline is detatched', () => {
beforeEach(() => {
beforeEach(async () => {
mergeRequestEventTypeQueryMock = jest.fn().mockResolvedValue({
data: {
project: {
id: 1,
mergeRequest: {
id: 1,
pipelines: { nodes: [{ id: 1, mergeRequestEventType: 'DETACHED' }] },
},
},
},
});
createWrapper({
detatchedPipeline: 'DETACHED',
retargeted: true,
});
await waitForPromises();
});
it('renders branch changed message', () => {
@@ -410,11 +439,24 @@
});
describe('when last pipeline is a branch pipeline', () => {
beforeEach(() => {
beforeEach(async () => {
mergeRequestEventTypeQueryMock = jest.fn().mockResolvedValue({
data: {
project: {
id: 1,
mergeRequest: {
id: 1,
pipelines: { nodes: [{ id: 1, mergeRequestEventType: null }] },
},
},
},
});
createWrapper({
detatchedPipeline: null,
retargeted: true,
});
await waitForPromises();
});
it('renders branch changed message', () => {

View File

@@ -453,7 +453,6 @@ export const mockStore = {
targetProjectId: 1,
iid: 1,
retargeted: false,
detatchedPipeline: 'DETATCHED',
};
export const mockMergePipeline = {

View File

@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Import::ImportUserCreator, feature_category: :importers do
let(:group) { create(:group) }
let(:group) { create(:group, organization: create(:organization)) }
subject(:service) { described_class.new(portable: group) }
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Import::ImportUserCreator, feature_category: :importers d
expect(user.user_type).to eq('import_user')
expect(group.reload.import_user).to eq(user)
expect(user.namespace.organization).to eq(group.organization)
end
context 'when import user already exists' do

View File

@@ -83,7 +83,7 @@ RSpec.describe 'Query.runner(id)', :freeze_time, feature_category: :fleet_visibi
active: runner.active,
paused: !runner.active,
status: runner.status.to_s.upcase,
job_execution_status: runner.builds.executing.any? ? 'RUNNING' : 'IDLE',
job_execution_status: runner.builds.executing.any? ? 'ACTIVE' : 'IDLE',
maximum_timeout: runner.maximum_timeout,
access_level: runner.access_level.to_s.upcase,
run_untagged: runner.run_untagged,
@@ -122,7 +122,7 @@ RSpec.describe 'Query.runner(id)', :freeze_time, feature_category: :fleet_visibi
architecture_name: runner_manager.architecture,
platform_name: runner_manager.platform,
status: runner_manager.status.to_s.upcase,
job_execution_status: runner_manager.builds.executing.any? ? 'RUNNING' : 'IDLE'
job_execution_status: runner_manager.builds.executing.any? ? 'ACTIVE' : 'IDLE'
)
end,
"pageInfo" => anything

View File

@@ -35,22 +35,5 @@ RSpec.describe WorkItems::Widgets::LabelsService::CreateService, feature_categor
end
end
end
context 'when widget does not exist in new type' do
let(:params) { {} }
before do
allow(service).to receive(:new_type_excludes_widget?).and_return(true)
end
it "sets label params as empty" do
expect(service.prepare_create_params(params: params)).to include(
{
add_label_ids: [],
label_ids: []
}
)
end
end
end
end

View File

@@ -0,0 +1,40 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::ClickHouse::FinishedPipelinesSyncCronWorker, :click_house, :freeze_time, feature_category: :fleet_visibility do
let(:worker) { described_class.new }
let(:args) { [3] }
subject(:perform) { worker.perform(*args) }
it 'invokes 3 workers' do
expect(Ci::ClickHouse::FinishedPipelinesSyncWorker).to receive(:perform_async).with(0, 3).once
expect(Ci::ClickHouse::FinishedPipelinesSyncWorker).to receive(:perform_async).with(1, 3).once
expect(Ci::ClickHouse::FinishedPipelinesSyncWorker).to receive(:perform_async).with(2, 3).once
perform
end
context 'when arguments are not specified' do
let(:args) { [] }
it 'invokes 1 worker with specified arguments' do
expect(Ci::ClickHouse::FinishedPipelinesSyncWorker).to receive(:perform_async).with(0, 1)
perform
end
end
context 'when clickhouse database is not available' do
before do
allow(Gitlab::ClickHouse).to receive(:configured?).and_return(false)
end
it 'does nothing' do
expect(Ci::ClickHouse::FinishedPipelinesSyncWorker).not_to receive(:perform_async)
perform
end
end
end

View File

@@ -0,0 +1,132 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::ClickHouse::FinishedPipelinesSyncWorker, :click_house, :freeze_time, feature_category: :fleet_visibility do
let(:worker) { described_class.new }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline1) do
create(:ci_pipeline, :success, project: project, ref: 'master', source: :push,
committed_at: 2.hours.before(1.month.ago), started_at: 1.hour.before(1.month.ago), finished_at: 1.month.ago,
duration: 60 * 60)
end
let_it_be(:pipeline2) do
create(:ci_pipeline, :pending, project: project, ref: 'main', source: :schedule)
end
subject(:perform) { worker.perform }
before do
create_sync_events pipeline1
end
specify do
expect(worker.class.click_house_worker_attrs).to match(
a_hash_including(migration_lock_ttl: ClickHouse::MigrationSupport::ExclusiveLock::DEFAULT_CLICKHOUSE_WORKER_TTL)
)
end
include_examples 'an idempotent worker' do
it 'calls CiFinishedPipelinesSyncService and returns its response payload' do
expect(worker).to receive(:log_extra_metadata_on_done)
.with(:result, {
reached_end_of_table: true, records_inserted: 1,
worker_index: 0, total_workers: 1
})
params = { worker_index: 0, total_workers: 1 }
expect_next_instance_of(::Ci::ClickHouse::DataIngestion::FinishedPipelinesSyncService, params) do |service|
expect(service).to receive(:execute).and_call_original
end
expect(ClickHouse::Client).to receive(:insert_csv).once.and_call_original
expect { perform }.to change { ci_finished_pipelines_row_count }.by(::Ci::Pipeline.finished.count)
end
context 'when an error is reported from service' do
before do
allow(Gitlab::ClickHouse).to receive(:configured?).and_return(false)
end
it 'skips execution' do
expect(worker).to receive(:log_extra_metadata_on_done)
.with(:result, { message: 'Disabled: ClickHouse database is not configured.', reason: :db_not_configured })
perform
end
end
end
context 'when exclusive lease error happens' do
context 'when the exclusive lease is already locked for the worker' do
it 'does nothing' do
expect_next_instance_of(::Ci::ClickHouse::DataIngestion::FinishedPipelinesSyncService) do |service|
expect(service).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
end
expect { perform }.not_to change { ci_finished_pipelines_row_count }
expect(perform).to eq({
message: 'Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError',
reason: :skipped
})
end
end
end
context 'with 2 workers' do
using RSpec::Parameterized::TableSyntax
subject(:perform) { worker.perform(worker_index, 2) }
where(:worker_index) { [0, 1] }
with_them do
let(:params) { { worker_index: worker_index, total_workers: 2 } }
it 'processes record if it falls on specified partition' do
# select the records that fall in the specified partition
partition_count = ::Ci::ClickHouse::DataIngestion::FinishedPipelinesSyncService::PIPELINE_ID_PARTITIONS
modulus_arel = Arel.sql("(pipeline_id % #{partition_count})")
lower_bound = (worker_index * partition_count / params[:total_workers]).to_i
upper_bound = ((worker_index + 1) * partition_count / params[:total_workers]).to_i
pipeline_ids =
Ci::FinishedPipelineChSyncEvent
.where(modulus_arel.gteq(lower_bound))
.where(modulus_arel.lt(upper_bound))
.map(&:pipeline_id)
expect(worker).to receive(:log_extra_metadata_on_done)
.with(:result, { reached_end_of_table: true, records_inserted: pipeline_ids.count }.merge(params))
expect_next_instance_of(::Ci::ClickHouse::DataIngestion::FinishedPipelinesSyncService, params) do |service|
expect(service).to receive(:execute).and_call_original
end
if pipeline_ids.any?
expect(ClickHouse::Client).to receive(:insert_csv).once.and_call_original
else
expect(ClickHouse::Client).not_to receive(:insert_csv)
end
perform
end
end
end
def create_sync_events(*pipelines)
pipelines.each do |pipeline|
Ci::FinishedPipelineChSyncEvent.new(
pipeline_id: pipeline.id, pipeline_finished_at: pipeline.finished_at,
project_namespace_id: pipeline.project.project_namespace_id
).save!
end
end
def ci_finished_pipelines_row_count
ClickHouse::Client.select('SELECT COUNT(*) AS count FROM ci_finished_pipelines FINAL', :main).first['count']
end
end

View File

@@ -85,4 +85,8 @@ RSpec.describe ClickHouseWorker, feature_category: :database do
expect(worker.get_pause_control).to eq(:click_house_migration)
expect(another_worker.get_pause_control).to be_nil
end
it 'marks the worker as having external dependencies' do
expect(worker.worker_has_external_dependencies?).to be_truthy
end
end