Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-11 12:09:17 +00:00
parent 988375009f
commit fd31bd1fc7
85 changed files with 991 additions and 504 deletions

View File

@ -44,7 +44,7 @@ export default {
sandbox
:srcdoc="rawCode"
frameborder="0"
scrolling="no"
scrolling="auto"
width="100%"
class="gl-overflow-auto"
></iframe>

View File

@ -292,6 +292,7 @@ export default class MergeRequestStore {
this.suggestPipelineFeatureId = data.suggest_pipeline_feature_id;
this.isDismissedSuggestPipeline = data.is_dismissed_suggest_pipeline;
this.securityReportsDocsPath = data.security_reports_docs_path;
this.securityConfigurationPath = data.security_configuration_path;
// code quality
const blobPath = data.blob_path || {};

View File

@ -57,10 +57,8 @@
@import 'framework/responsive_tables';
@import 'framework/stacked_progress_bar';
@import 'framework/sortable';
@import 'framework/ci_variable_list';
@import 'framework/feature_highlight';
@import 'framework/read_more';
@import 'framework/flex_grid';
@import 'framework/system_messages';
@import 'framework/spinner';
@import 'framework/card';

View File

@ -1,77 +0,0 @@
.ci-variable-list {
margin-left: 0;
margin-bottom: 0;
padding-left: 0;
list-style: none;
clear: both;
}
.ci-variable-row {
display: flex;
align-items: flex-start;
@include media-breakpoint-down(xs) {
align-items: flex-end;
}
&:not(:last-child) {
margin-bottom: $gl-btn-padding;
@include media-breakpoint-down(xs) {
margin-bottom: 3 * $gl-btn-padding;
}
}
&:last-child {
.ci-variable-body-item:last-child {
margin-right: $ci-variable-remove-button-width;
@include media-breakpoint-down(xs) {
margin-right: 0;
}
}
.ci-variable-row-remove-button {
display: none;
}
@include media-breakpoint-down(xs) {
.ci-variable-row-body {
margin-right: $ci-variable-remove-button-width;
}
}
}
}
.ci-variable-row-body {
display: flex;
align-items: flex-start;
width: 100%;
padding-bottom: $gl-padding;
@include media-breakpoint-down(xs) {
display: block;
}
}
.ci-variable-body-item {
flex: 1;
&:not(:last-child) {
margin-right: $gl-btn-padding;
@include media-breakpoint-down(xs) {
margin-right: 0;
margin-bottom: $gl-btn-padding;
}
}
}
.ci-variable-masked-item,
.ci-variable-protected-item {
flex: 0 1 auto;
display: flex;
align-items: center;
padding-top: 5px;
padding-bottom: 5px;
}

View File

@ -1,52 +0,0 @@
.flex-grid {
.grid-row {
border-bottom: 1px solid $border-color;
padding: 0;
&:last-child {
border-bottom: 0;
}
@include media-breakpoint-down(md) {
border-bottom: 0;
border-right: 1px solid $border-color;
&:last-child {
border-right: 0;
}
}
@include media-breakpoint-down(xs) {
border-right: 0;
border-bottom: 1px solid $border-color;
&:last-child {
border-bottom: 0;
}
}
}
.grid-cell {
padding: 10px $gl-padding;
border-right: 1px solid $border-color;
&:last-child {
border-right: 0;
}
@include media-breakpoint-up(md) {
flex: 1;
}
@include media-breakpoint-down(md) {
border-right: 0;
flex: none;
}
}
}
.card {
.card-body.flex-grid {
padding: 0;
}
}

View File

@ -1,5 +1,74 @@
@import 'mixins_and_variables_and_functions';
.ci-variable-list {
margin-left: 0;
margin-bottom: 0;
padding-left: 0;
list-style: none;
clear: both;
}
.ci-variable-row {
display: flex;
align-items: flex-start;
@include media-breakpoint-down(xs) {
align-items: flex-end;
}
&:not(:last-child) {
margin-bottom: $gl-btn-padding;
@include media-breakpoint-down(xs) {
margin-bottom: 3 * $gl-btn-padding;
}
}
&:last-child {
.ci-variable-body-item:last-child {
margin-right: $ci-variable-remove-button-width;
@include media-breakpoint-down(xs) {
margin-right: 0;
}
}
.ci-variable-row-remove-button {
display: none;
}
@include media-breakpoint-down(xs) {
.ci-variable-row-body {
margin-right: $ci-variable-remove-button-width;
}
}
}
}
.ci-variable-row-body {
display: flex;
align-items: flex-start;
width: 100%;
padding-bottom: $gl-padding;
@include media-breakpoint-down(xs) {
display: block;
}
}
.ci-variable-body-item {
flex: 1;
&:not(:last-child) {
margin-right: $gl-btn-padding;
@include media-breakpoint-down(xs) {
margin-right: 0;
margin-bottom: $gl-btn-padding;
}
}
}
.pipeline-schedule-form {
.gl-field-error {
margin: 10px 0 0;

View File

@ -2,6 +2,11 @@
module Projects
class ProjectAttributesChangedEvent < ::Gitlab::EventStore::Event
PAGES_RELATED_ATTRIBUTES = %w[
pages_https_only
visibility_level
].freeze
def schema
{
'type' => 'object',
@ -14,5 +19,11 @@ module Projects
'required' => %w[project_id namespace_id root_namespace_id attributes]
}
end
def pages_related?
PAGES_RELATED_ATTRIBUTES.any? do |attribute|
data[:attributes].include?(attribute)
end
end
end
end

View File

@ -13,10 +13,6 @@ module Ml
has_internal_id :iid, scope: :project
def artifact_location
'not_implemented'
end
class << self
def by_project_id_and_iid(project_id, iid)
find_by(project_id: project_id, iid: iid)
@ -26,8 +22,8 @@ module Ml
find_by(project_id: project_id, name: name)
end
def has_record?(project_id, name)
where(project_id: project_id, name: name).exists?
def by_project_id(project_id)
where(project_id: project_id)
end
end
end

View File

@ -37,7 +37,6 @@ module Ci
Gitlab::Ci::Pipeline::Chain::CreateDeployments,
Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations,
Gitlab::Ci::Pipeline::Chain::Limit::Activity,
Gitlab::Ci::Pipeline::Chain::Limit::JobActivity, # deprecated in favour of Limit::ActiveJobs
Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines,
Gitlab::Ci::Pipeline::Chain::Metrics,
Gitlab::Ci::Pipeline::Chain::TemplateUsage,

View File

@ -0,0 +1,85 @@
# frozen_string_literal: true

module Ml
  module ExperimentTracking
    # Data-access helper that wraps creation and updating of ML experiment
    # candidates, together with their metrics and parameters, for a single
    # project/user pair.
    class CandidateRepository
      attr_accessor :project, :user, :experiment, :candidate

      # @param project [Project] project that owns the candidates
      # @param user [User] user performing the tracking calls
      def initialize(project, user)
        @project = project
        @user = user
      end

      # Looks up a candidate by its project-scoped iid.
      def by_iid(iid)
        ::Ml::Candidate.with_project_id_and_iid(project.id, iid)
      end

      # Creates a candidate under +experiment+. A missing start time is
      # persisted as 0.
      def create!(experiment, start_time)
        experiment.candidates.create!(user: user, start_time: start_time || 0)
      end

      # Applies the given end time and (downcased) status when present, then
      # persists the candidate. Returns false when the save fails.
      def update(candidate, status, end_time)
        candidate.end_time = end_time if end_time
        candidate.status = status.downcase if status
        candidate.save
      end

      # Records a single metric observation for the candidate.
      def add_metric!(candidate, name, value, tracked_at, step)
        candidate.metrics.create!(name: name, value: value, tracked_at: tracked_at, step: step)
      end

      # Records a single parameter for the candidate.
      def add_param!(candidate, name, value)
        candidate.params.create!(name: name, value: value)
      end

      # Bulk-inserts metric rows built from +metric_definitions+
      # (hashes with :key, :value, :timestamp, :step). No-op without a candidate.
      def add_metrics(candidate, metric_definitions)
        return unless candidate.present?

        rows = metric_definitions.map do |definition|
          {
            candidate_id: candidate.id,
            name: definition[:key],
            value: definition[:value],
            tracked_at: definition[:timestamp],
            step: definition[:step],
            **timestamps
          }
        end

        ::Ml::CandidateMetric.insert_all(rows, returning: false) unless rows.empty?
      end

      # Bulk-inserts parameter rows built from +param_definitions+
      # (hashes with :key and :value). No-op without a candidate.
      def add_params(candidate, param_definitions)
        return unless candidate.present?

        rows = param_definitions.map do |definition|
          {
            candidate_id: candidate.id,
            name: definition[:key],
            value: definition[:value],
            **timestamps
          }
        end

        ::Ml::CandidateParam.insert_all(rows, returning: false) unless rows.empty?
      end

      private

      # created_at/updated_at pair required by insert_all, which skips
      # ActiveRecord timestamping.
      def timestamps
        now = Time.zone.now
        { created_at: now, updated_at: now }
      end
    end
  end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true

module Ml
  module ExperimentTracking
    # Data-access helper for ML experiments scoped to a single project.
    class ExperimentRepository
      attr_accessor :project, :user

      # @param project [Project] owning project
      # @param user [User, nil] optional user recorded on created experiments
      def initialize(project, user = nil)
        @project = project
        @user = user
      end

      # Fetches an experiment by iid (takes precedence) or by name.
      # Returns nil when neither is given.
      def by_iid_or_name(iid: nil, name: nil)
        if iid
          ::Ml::Experiment.by_project_id_and_iid(project.id, iid)
        elsif name
          ::Ml::Experiment.by_project_id_and_name(project.id, name)
        end
      end

      # All experiments belonging to the project.
      def all
        ::Ml::Experiment.by_project_id(project.id)
      end

      # Creates a named experiment owned by the repository's project and user.
      def create!(name)
        ::Ml::Experiment.create!(name: name, user: user, project: project)
      end
    end
  end
end

View File

@ -12,6 +12,7 @@
window.gl.mrWidgetData.mr_troubleshooting_docs_path = '#{help_page_path('user/project/merge_requests/reviews/index.md', anchor: 'troubleshooting')}';
window.gl.mrWidgetData.pipeline_must_succeed_docs_path = '#{help_page_path('user/project/merge_requests/merge_when_pipeline_succeeds.md', anchor: 'require-a-successful-pipeline-for-merge')}';
window.gl.mrWidgetData.security_approvals_help_page_path = '#{help_page_path('user/application_security/index.md', anchor: 'security-approvals-in-merge-requests')}';
window.gl.mrWidgetData.security_configuration_path = '#{project_security_configuration_path(@project)}';
window.gl.mrWidgetData.license_compliance_docs_path = '#{help_page_path('user/compliance/license_compliance/index.md', anchor: 'policies')}';
window.gl.mrWidgetData.eligible_approvers_docs_path = '#{help_page_path('user/project/merge_requests/approvals/rules.md', anchor: 'eligible-approvers')}';
window.gl.mrWidgetData.approvals_help_path = '#{help_page_path("user/project/merge_requests/approvals/index.md")}';

View File

@ -1,10 +1,10 @@
- form_field_classes = local_assigns[:admin_view] || !Feature.enabled?(:project_list_filter_bar) ? 'input-short js-projects-list-filter' : ''
- form_field_classes = local_assigns[:admin_view] || !Feature.enabled?(:project_list_filter_bar) ? 'input-short js-projects-list-filter' : 'gl-w-full! gl-pl-7 '
- placeholder = local_assigns[:search_form_placeholder] ? search_form_placeholder : _('Filter by name')
= form_tag filter_projects_path, method: :get, class: 'project-filter-form', data: { qa_selector: 'project_filter_form_container' }, id: 'project-filter-form' do |f|
= search_field_tag :name, params[:name],
placeholder: placeholder,
class: "project-filter-form-field form-control gl-w-full! gl-pl-7 #{form_field_classes}",
class: "project-filter-form-field form-control #{form_field_classes}",
spellcheck: false,
id: 'project-filter-form-field',
autofocus: local_assigns[:autofocus]

View File

@ -1,8 +0,0 @@
---
name: ci_limit_active_jobs_early
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97700
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/373284
milestone: '15.4'
type: development
group: group::pipeline execution
default_enabled: false

View File

@ -13,7 +13,7 @@ GitLab supports authentication using smartcards.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/33669) in GitLab 12.6.
By default, existing users can continue to log in with a username and password when smartcard
By default, existing users can continue to sign in with a username and password when smartcard
authentication is enabled.
To force existing users to use only smartcard authentication,

View File

@ -233,7 +233,7 @@ At this point, your **secondary** site contains an up-to-date copy of everything
## Promote the **secondary** site
After the replication is finished, [promote the **secondary** site to a **primary** site](index.md). This process causes a brief outage on the **secondary** site, and users may need to log in again. If you follow the steps correctly, the old primary Geo site should still be disabled and user traffic should go to the newly-promoted site instead.
After the replication is finished, [promote the **secondary** site to a **primary** site](index.md). This process causes a brief outage on the **secondary** site, and users may need to sign in again. If you follow the steps correctly, the old primary Geo site should still be disabled and user traffic should go to the newly-promoted site instead.
When the promotion is completed, the maintenance window is over, and your new **primary** site now
begins to diverge from the old one. If problems do arise at this point, failing

View File

@ -121,7 +121,7 @@ follow these steps to avoid unnecessary data loss:
```
From this point, users are unable to view their data or make changes on the
**primary** site. They are also unable to log in to the **secondary** site.
**primary** site. They are also unable to sign in to the **secondary** site.
However, existing sessions must work for the remainder of the maintenance period, and
so public data is accessible throughout.

View File

@ -106,7 +106,7 @@ follow these steps to avoid unnecessary data loss:
```
From this point, users are unable to view their data or make changes on the
**primary** site. They are also unable to log in to the **secondary** site.
**primary** site. They are also unable to sign in to the **secondary** site.
However, existing sessions need to work for the remainder of the maintenance period, and
so public data is accessible throughout.

View File

@ -63,7 +63,7 @@ Geo node in a PostgreSQL console (`sudo gitlab-psql`):
## Remove Geo-related configuration
1. For each node on your primary Geo site, SSH into the node and log in as root:
1. For each node on your primary Geo site, SSH into the node and sign in as root:
```shell
sudo -i

View File

@ -69,6 +69,6 @@ That's totally fine. We use HTTP(s) to fetch repository changes from the **prima
Yes. See [Container Registry for a **secondary** site](container_registry.md).
## Can you log in to a secondary site?
## Can you sign in to a secondary site?
Yes, but secondary sites receive all authentication data (like user accounts and logins) from the primary instance. This means you are re-directed to the primary for authentication and then routed back.

View File

@ -53,7 +53,7 @@ you already have a working GitLab instance that is in-use, it can be used as a
The second GitLab site serves as the Geo **secondary** site. Again, use the
[GitLab reference architectures documentation](../../reference_architectures/index.md) to set this up.
It's a good idea to log in and test it. However, be aware that its data is
It's a good idea to sign in and test it. However, be aware that its data is
wiped out as part of the process of replicating from the **primary** site.
## Configure a GitLab site to be the Geo **primary** site

View File

@ -16,14 +16,14 @@ You must use a [GitLab Premium](https://about.gitlab.com/pricing/) license or hi
but you only need one license for all the sites.
WARNING:
The steps below should be followed in the order they appear. **Make sure the GitLab version is the same on all sites. Do not create an account or log in to the new secondary.**
The steps below should be followed in the order they appear. **Make sure the GitLab version is the same on all sites. Do not create an account or sign in to the new secondary.**
## Using Omnibus GitLab
If you installed GitLab using the Omnibus packages (highly recommended):
1. Confirm the [requirements for running Geo](../index.md#requirements-for-running-geo) are met.
1. [Install GitLab Enterprise Edition](https://about.gitlab.com/install/) on the nodes that serve as the **secondary** site. **Do not create an account or log in** to the new **secondary** site. The **GitLab version must match** across primary and secondary sites.
1. [Install GitLab Enterprise Edition](https://about.gitlab.com/install/) on the nodes that serve as the **secondary** site. **Do not create an account or sign in** to the new **secondary** site. The **GitLab version must match** across primary and secondary sites.
1. [Add the GitLab License](../../../user/admin_area/license.md) on the **primary** site to unlock Geo. The license must be for [GitLab Premium](https://about.gitlab.com/pricing/) or higher.
1. [Confirm network connectivity](../index.md#firewall-rules) between the **primary** and **secondary** site.
1. [Set up the database replication](database.md) (`primary (read-write) <-> secondary (read-only)` topology).

View File

@ -54,7 +54,7 @@ read the [Kroki installation](https://docs.kroki.io/kroki/setup/install/#_images
## Enable Kroki in GitLab
You need to enable Kroki integration from Settings under Admin Area.
To do that, log in with an administrator account and follow these steps:
To do that, sign in with an administrator account and follow these steps:
1. On the top bar, select **Main menu > Admin**.
1. Go to **Settings > General**.

View File

@ -82,7 +82,7 @@ them to disable Maintenance Mode after it's been enabled.
### Authentication
All users can log in and out of the GitLab instance but no new users can be created.
All users can sign in and out of the GitLab instance but no new users can be created.
If there are [LDAP syncs](../auth/ldap/index.md) scheduled for that time, they fail since user creation is disabled. Similarly, [user creations based on SAML](../../integration/saml.md#general-setup) fail.
@ -113,9 +113,9 @@ For most JSON requests, `POST`, `PUT`, `PATCH`, and `DELETE` are blocked, and th
|:----:|:--------------------------------------:|:----:|
| `POST` | `/admin/application_settings/general` | To allow updating application settings in the administrator UI |
| `PUT` | `/api/v4/application/settings` | To allow updating application settings with the API |
| `POST` | `/users/sign_in` | To allow users to log in. |
| `POST` | `/users/sign_out`| To allow users to log out. |
| `POST` | `/oauth/token` | To allow users to log in to a Geo secondary for the first time. |
| `POST` | `/users/sign_in` | To allow users to sign in. |
| `POST` | `/users/sign_out`| To allow users to sign out. |
| `POST` | `/oauth/token` | To allow users to sign in to a Geo secondary for the first time. |
| `POST` | `/admin/session`, `/admin/session/destroy` | To allow [Admin Mode for GitLab administrators](https://gitlab.com/groups/gitlab-org/-/epics/2158) |
| `POST` | Paths ending with `/compare`| Git revision routes. |
| `POST` | `.git/git-upload-pack` | To allow Git pull/clone. |

View File

@ -81,7 +81,7 @@ GitLab displays your link in the **Main menu > Admin > Monitoring > Metrics Dash
When setting up Grafana through the process above, no scope shows in the screen at
**Main menu > Admin > Applications > GitLab Grafana**. However, the `read_user` scope is
required and is provided to the application automatically. Setting any scope other than
`read_user` without also including `read_user` leads to this error when you try to log in using
`read_user` without also including `read_user` leads to this error when you try to sign in using
GitLab as the OAuth provider:
```plaintext

View File

@ -74,7 +74,7 @@ $ ssh-add -L | grep cert | ssh-keygen -L -f -
```
Technically that's not strictly true, for example, it could be
`prod-aearnfjord` if it's a SSH certificate you'd normally log in to
`prod-aearnfjord` if it's a SSH certificate you'd normally sign in to
servers as the `prod-aearnfjord` user, but then you must specify your
own `AuthorizedPrincipalsCommand` to do that mapping instead of using
our provided default.
@ -108,7 +108,7 @@ Where `{KEY_ID}` is the `%i` argument passed to the script
You need to customize the `sshUsers` part of that. It should be
some principal that's guaranteed to be part of the key for all users
who can log in to GitLab, or you must provide a list of principals,
who can sign in to GitLab, or you must provide a list of principals,
one of which is present for the user, for example:
```plaintext
@ -123,7 +123,7 @@ into multiple lines of `authorized_keys` output, as described in the
`AuthorizedPrincipalsFile` documentation in `sshd_config(5)`.
Normally when using the `AuthorizedKeysCommand` with OpenSSH the
principal is some "group" that's allowed to log into that
principal is some "group" that's allowed to sign in to that
server. However with GitLab it's only used to appease OpenSSH's
requirement for it, we effectively only care about the "key ID" being
correct. Once that's extracted GitLab enforces its own ACLs for

View File

@ -27,7 +27,7 @@ This Rake task does not list time zones in TZInfo format required by Omnibus Git
GitLab defaults its time zone to UTC. It has a global time zone configuration parameter in `/etc/gitlab/gitlab.rb`.
To obtain a list of time zones, log in to your GitLab application server and run a command that generates a list of time zones in TZInfo format for the server. For example, install `timedatectl` and run `timedatectl list-timezones`.
To obtain a list of time zones, sign in to your GitLab application server and run a command that generates a list of time zones in TZInfo format for the server. For example, install `timedatectl` and run `timedatectl list-timezones`.
To update, add the time zone that best applies to your location. For example:

View File

@ -276,7 +276,7 @@ Parameters:
| Parameter | Type | Required | Description |
|---------------|---------|----------|---------------------------------------------------------------------------------------------|
| `token` | string | true | Campfire API token. To find it, log into Campfire and select **My info**. |
| `token` | string | true | Campfire API token. To find it, sign in to Campfire and select **My info**. |
| `subdomain` | string | false | Campfire subdomain. Text between `https://` and `.campfirenow.com` when you're logged in. |
| `room` | string | false | Campfire room. The last part of the URL when you're in a room. |

View File

@ -388,7 +388,7 @@ Proposal:
| Author | Hayley Swimelar |
| Engineering Leader | Sam Goldstein |
| Product Manager | |
| Architecture Evolution Coach | |
| Architecture Evolution Coach | Andrew Newdigate |
| Recommender | |
| Recommender | |
| Recommender | |

View File

@ -608,7 +608,7 @@ build:
- docker run my-docker-image /script/to/run/tests
```
To log in to Docker Hub, leave `$DOCKER_REGISTRY`
To sign in to Docker Hub, leave `$DOCKER_REGISTRY`
empty or remove it.
### Option 2: Mount `~/.docker/config.json` on each job

View File

@ -402,7 +402,7 @@ pulling from Docker Hub fails. Docker daemon tries to use the same credentials f
> Introduced in GitLab Runner 12.0.
As an example, let's assume that you want to use the `<aws_account_id>.dkr.ecr.<region>.amazonaws.com/private/image:latest`
image. This image is private and requires you to log in into a private container registry.
image. This image is private and requires you to sign in to a private container registry.
To configure access for `<aws_account_id>.dkr.ecr.<region>.amazonaws.com`, follow these steps:

View File

@ -115,9 +115,9 @@ SSH key.
You can generate the SSH key from the machine that GitLab Runner is installed
on, and use that key for all projects that are run on this machine.
1. First, log in to the server that runs your jobs.
1. First, sign in to the server that runs your jobs.
1. Then, from the terminal, log in as the `gitlab-runner` user:
1. Then, from the terminal, sign in as the `gitlab-runner` user:
```shell
sudo su - gitlab-runner

View File

@ -417,7 +417,7 @@ In this environment, OpenSSL refuses to perform cryptographic operations
forbidden by the FIPS standards. This enables you to reproduce FIPS-related bugs,
and validate fixes.
You should be able to open a web browser inside the virtual machine and log in
You should be able to open a web browser inside the virtual machine and sign in
to the GitLab instance.
You can disable FIPS mode again by running this command, then restarting the

View File

@ -69,8 +69,8 @@ the lock, it switches to standby mode.
Geo uses [streaming replication](#streaming-replication) to replicate
the database from the **primary** to the **secondary** sites. This
replication gives the **secondary** sites access to all the data saved
in the database. So users can log in on the **secondary** and read all
the issues, merge requests, and so on, on the **secondary** site.
in the database, so users can sign in to the **secondary** site and read,
for example, all the issues and merge requests.
### Repository replication

View File

@ -75,7 +75,7 @@ application from the registry. For example, the GitLab container registry direct
from `https://gitlab.com/jwt/auth`. This endpoint is part of the `gitlab-org/gitlab` project, also known as the
rails project or web service.
When a user tries to log into the dependency proxy with a Docker client, we must tell it where to get a JWT. We
When a user tries to sign in to the dependency proxy with a Docker client, we must tell it where to get a JWT. We
can use the same endpoint we use with the container registry: `https://gitlab.com/jwt/auth`. But in our case,
we tell the Docker client to specify `service=dependency_proxy` in the parameters so can use a separate underlying
service to generate the token.

View File

@ -79,7 +79,7 @@ GitLab provides built-in tools to help improve performance and availability:
- [Service measurement](service_measurement.md) for measuring and logging service execution.
GitLab team members can use [GitLab.com's performance monitoring systems](https://about.gitlab.com/handbook/engineering/monitoring/) located at
[`dashboards.gitlab.net`](https://dashboards.gitlab.net), this requires you to log in using your
[`dashboards.gitlab.net`](https://dashboards.gitlab.net), this requires you to sign in using your
`@gitlab.com` email address. Non-GitLab team-members are advised to set up their
own Prometheus and Grafana stack.

View File

@ -246,7 +246,7 @@ end
```
The `before` block is essentially a `before(:each)` and is run before each example,
ensuring we now log in at the beginning of each test.
ensuring we now sign in at the beginning of each test.
## Test setup using resources and page objects

View File

@ -299,7 +299,7 @@ point of failure and so the screenshot would not be captured at the right moment
## Ensure tests do not leave the browser logged in
All tests expect to be able to log in at the start of the test.
All tests expect to be able to sign in at the start of the test.
For an example see [issue #34736](https://gitlab.com/gitlab-org/gitlab/-/issues/34736).

View File

@ -84,7 +84,7 @@ the GitLab handbook information for the [shared 1Password account](https://about
### Enable a feature flag for my Review App
1. Open your Review App and log in as documented above.
1. Open your Review App and sign in as documented above.
1. Create a personal access token.
1. Enable the feature flag using the [Feature flag API](../../api/features.md).

View File

@ -670,9 +670,9 @@ Depending on how you installed GitLab and if you did not change the password by
- Your instance ID if you used the official GitLab AMI.
- A randomly generated password stored for 24 hours in `/etc/gitlab/initial_root_password`.
To change the default password, log in as the `root` user with the default password and [change it in the user profile](../../user/profile#change-your-password).
To change the default password, sign in as the `root` user with the default password and [change it in the user profile](../../user/profile#change-your-password).
When our [auto scaling group](#create-an-auto-scaling-group) spins up new instances, we are able to log in with username `root` and the newly created password.
When our [auto scaling group](#create-an-auto-scaling-group) spins up new instances, we are able to sign in with username `root` and the newly created password.
### Create custom AMI

View File

@ -122,7 +122,7 @@ After starting a container you can visit `gitlab.example.com` (or
`http://192.168.59.103` if you used boot2docker on macOS). It might take a while
before the Docker container starts to respond to queries.
Visit the GitLab URL, and log in with username `root`
Visit the GitLab URL, and sign in with the username `root`
and the password from the following command:
```shell

View File

@ -31,7 +31,7 @@ After you have performed those two steps, you can [create a VM](#creating-the-vm
To deploy GitLab on GCP you must create a virtual machine:
1. Go to <https://console.cloud.google.com/compute/instances> and log in with your Google credentials.
1. Go to <https://console.cloud.google.com/compute/instances> and sign in with your Google credentials.
1. Select **Create**
![Search for GitLab](img/launch_vm.png)
@ -49,7 +49,7 @@ To deploy GitLab on GCP you must create a virtual machine:
## Installing GitLab
After a few seconds, the instance is created and available to log in. The next step is to install GitLab onto the instance.
After a few seconds, the instance is created and available to sign in. The next step is to install GitLab onto the instance.
![Deploy settings](img/vm_created.png)

View File

@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Salesforce OmniAuth Provider **(FREE SELF)**
You can integrate your GitLab instance with [Salesforce](https://www.salesforce.com/) to enable users to log in to your GitLab instance with their Salesforce account.
You can integrate your GitLab instance with [Salesforce](https://www.salesforce.com/) to enable users to sign in to your GitLab instance with their Salesforce account.
## Create a Salesforce Connected App

View File

@ -87,7 +87,7 @@ block_auto_created_users: false
This task disables two-factor authentication (2FA) for all users that have it enabled. This can be
useful if the GitLab `config/secrets.yml` file has been lost and users are unable
to log in, for example.
to sign in, for example.
To disable two-factor authentication for all users, run:

View File

@ -56,7 +56,7 @@ To unlock a locked user:
1. Exit the console with <kbd>Control</kbd>+<kbd>d</kbd>
The user should now be able to log in.
The user should now be able to sign in.
<!-- ## Troubleshooting

View File

@ -259,7 +259,7 @@ generated for the renewal and available for viewing or download on the
#### Enable or disable automatic subscription renewal
To view or change automatic subscription renewal (at the same tier as the
previous period), log in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in), and:
previous period), sign in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in), and:
- If a **Resume subscription** button is displayed, your subscription was canceled
previously. Click it to resume automatic renewal.

View File

@ -300,7 +300,7 @@ then [renew your GitLab self-managed subscription](#renew-a-subscription).
The [Customers Portal](https://customers.gitlab.com/customers/sign_in) is your
tool for renewing and modifying your subscription. Before going ahead with renewal,
log in and verify or update:
sign in and verify or update:
- The invoice contact details on the **Account details** page.
- The credit card on file on the **Payment Methods** page.

View File

@ -42,7 +42,7 @@ to ensure the major components of GitLab are working:
```
1. In GitLab UI, check that:
- Users can log in.
- Users can sign in.
- The project list is visible.
- Project issues and merge requests are accessible.
- Users can clone repositories from GitLab.

View File

@ -83,7 +83,7 @@ by removing them in LDAP, or directly from the Admin Area. To do this:
A blocked user:
- Cannot log in.
- Cannot sign in.
- Cannot access Git repositories or the API.
- Does not receive any notifications from GitLab.
- Cannot use [slash commands](../../integration/slash_commands.md).

View File

@ -53,7 +53,7 @@ The following is an example of the **Abuse Reports** page:
### Blocking users
A blocked user cannot log in or access any repositories, but all of their data
A blocked user cannot sign in or access any repositories, but all of their data
remains.
Blocking a user:

View File

@ -103,7 +103,7 @@ To create a Jira issue for a vulnerability:
1. On the left sidebar, select **Security & Compliance > Vulnerability report**.
1. Select the vulnerability's description.
1. Select **Create Jira issue**.
1. If you're not already logged in to Jira, log in.
1. If you're not already logged in to Jira, sign in.
The Jira issue is created and opened in a new browser tab. The **Summary** and **Description**
fields are pre-populated from the vulnerability's details.

View File

@ -39,7 +39,7 @@ To generate a SAML Response:
console.
- Firefox: Select the SAML-tracer icon located on the browser toolbar.
1. Go to the GitLab single sign-on URL for the group in the same browser tab with the SAML tracer open.
1. Select **Authorize** or attempt to log in. A SAML response is displayed in the tracer console that resembles this
1. Select **Authorize** or attempt to sign in. A SAML response is displayed in the tracer console that resembles this
[example SAML response](index.md#example-saml-response).
1. Within the SAML tracer, select the **Export** icon to save the response in JSON format.
@ -175,7 +175,7 @@ initiated by the service provider and not only the identity provider.
A user can see this message when they are trying to [manually link SAML to their existing GitLab.com account](index.md#linking-saml-to-your-existing-gitlabcom-account).
To resolve this problem, the user should check they are using the correct GitLab password to log in. The user first needs
To resolve this problem, the user should check they are using the correct GitLab password to sign in. The user first needs
to [reset their password](https://gitlab.com/users/password/new) if both:
- The account was provisioned by SCIM.
@ -202,7 +202,7 @@ For GitLab.com, alternatively, when users need to [link SAML to their existing G
### Users receive a 404 **(PREMIUM SAAS)**
Because SAML SSO for groups is a paid feature, your subscription expiring can result in a `404` error when you're signing in using SAML SSO on GitLab.com.
If all users are receiving a `404` when attempting to log in using SAML, confirm
If all users are receiving a `404` when attempting to sign in using SAML, confirm
[there is an active subscription](../../../subscriptions/gitlab_com/index.md#view-your-gitlab-saas-subscription) being used in this SAML SSO namespace.
If you receive a `404` during setup when using "verify configuration", make sure you have used the correct

View File

@ -601,7 +601,7 @@ You can then tag the manifest list with `mygroup/myapp:1.0.0`.
### Troubleshoot as a GitLab server administrator
Troubleshooting the GitLab Container Registry, most of the times, requires
you to log in to GitLab server with administrator access.
you to sign in to GitLab server with administrator access.
[Read how to troubleshoot the Container Registry](../../../administration/packages/container_registry.md#troubleshooting).

View File

@ -74,7 +74,7 @@ you must authenticate against the Dependency Proxy.
Follow the [instructions for using images from a private registry](../../../ci/docker/using_docker_images.md#access-an-image-from-a-private-container-registry),
but instead of using `registry.example.com:5000`, use your GitLab domain with no port `gitlab.example.com`.
For example, to manually log in:
For example, to manually sign in:
```shell
docker login gitlab.example.com --username my_username --password my_password
@ -109,7 +109,7 @@ Proxy.
> - Automatic runner authentication, when using the Dependency Proxy to pull the image for the job, was [added](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27302) in GitLab 13.9.
> - The prefix for group names containing uppercase letters was [fixed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54559) in GitLab 13.10.
Runners log in to the Dependency Proxy automatically. To pull through
Runners sign in to the Dependency Proxy automatically. To pull through
the Dependency Proxy, use one of the [predefined variables](../../../ci/variables/predefined_variables.md):
- `CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX` pulls through the top-level group.

View File

@ -19,7 +19,7 @@ You can view the Harbor Registry for a project or group.
You can search, sort, and filter images on this page. You can share a filtered view by copying the URL from your browser.
At the project level, you can see **CLI Commands** in the upper right corner, where you can copy
corresponding commands to log in, build images, and push images. **CLI Commands** is not shown at
corresponding commands to sign in, build images, and push images. **CLI Commands** is not shown at
the group level.
NOTE:

View File

@ -153,7 +153,7 @@ the components outlined above and the pre-loaded demo runbook.
```
1. After JupyterHub has been installed successfully, open the **Jupyter Hostname**
in your browser. Select **Sign in with GitLab** button to log in to
in your browser. Select **Sign in with GitLab** button to sign in to
JupyterHub and start the server. Authentication is enabled for any user of the
GitLab instance with OAuth2. This button redirects you to a page at GitLab
requesting authorization for JupyterHub to use your GitLab account.

View File

@ -205,7 +205,7 @@ Supported GitHub branch protection rules are mapped to GitLab branch protection
- GitHub rule **Require conversation resolution before merging** for the project's default branch is mapped to the [**All threads must be resolved** GitLab setting](../../discussions/index.md#prevent-merge-unless-all-threads-are-resolved). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/371110) in GitLab 15.5.
- Support for GitHub rule **Require a pull request before merging** is proposed in issue [370951](https://gitlab.com/gitlab-org/gitlab/-/issues/370951).
- Support for GitHub rule **Require signed commits** is proposed in issue [370949](https://gitlab.com/gitlab-org/gitlab/-/issues/370949).
- GitHub rule **Require signed commits** for the project's default branch is mapped to the **Reject unsigned commits** GitLab setting. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/370949) in GitLab 15.5.
- Support for GitHub rule **Require status checks to pass before merging** was proposed in issue [370948](https://gitlab.com/gitlab-org/gitlab/-/issues/370948). However, this rule cannot be translated during project import into GitLab due to technical difficulties.
You can still create [status checks](../merge_requests/status_checks.md) in GitLab yourself.

View File

@ -14,7 +14,7 @@ You can configure GitLab to send notifications to a Webex Teams space:
## Create a webhook for the space
1. Go to the [Incoming Webhooks app page](https://apphub.webex.com/applications/incoming-webhooks-cisco-systems-38054-23307).
1. Select **Connect** and log in to Webex Teams, if required.
1. Select **Connect**, and sign in to Webex Teams if required.
1. Enter a name for the webhook and select the space to receive the notifications.
1. Select **ADD**.
1. Copy the **Webhook URL**.

View File

@ -39,7 +39,7 @@ Now we have a different picture. [According to Josh Aas](https://letsencrypt.org
<!-- vale gitlab.rulename = YES -->
> _We've since come to realize that HTTPS is important for almost all websites. It's important for any website that allows people to log in with a password, any website that [tracks its users](https://www.washingtonpost.com/news/the-switch/wp/2013/12/10/nsa-uses-google-cookies-to-pinpoint-targets-for-hacking/) in any way, any website that [doesn't want its content altered](https://arstechnica.com/tech-policy/2014/09/why-comcasts-javascript-ad-injections-threaten-security-net-neutrality/), and for any site that offers content people might not want others to know they are consuming. We've also learned that any site not secured by HTTPS [can be used to attack other sites](https://krebsonsecurity.com/2015/04/dont-be-fodder-for-chinas-great-cannon/)._
> _We've since come to realize that HTTPS is important for almost all websites. It's important for any website that allows people to sign in with a password, any website that [tracks its users](https://www.washingtonpost.com/news/the-switch/wp/2013/12/10/nsa-uses-google-cookies-to-pinpoint-targets-for-hacking/) in any way, any website that [doesn't want its content altered](https://arstechnica.com/tech-policy/2014/09/why-comcasts-javascript-ad-injections-threaten-security-net-neutrality/), and for any site that offers content people might not want others to know they are consuming. We've also learned that any site not secured by HTTPS [can be used to attack other sites](https://krebsonsecurity.com/2015/04/dont-be-fodder-for-chinas-great-cannon/)._
Therefore, the reason why certificates are so important is that they encrypt
the connection between the **client** (you, your visitors)

View File

@ -5,22 +5,10 @@ module API
module Ml
module Mlflow
class Experiment < Grape::Entity
expose :experiment do
expose :experiment_id
expose :name
expose :lifecycle_stage
expose :artifact_location
end
private
def lifecycle_stage
object.deleted_on? ? 'deleted' : 'active'
end
def experiment_id
object.iid.to_s
end
expose(:experiment_id) { |experiment| experiment.iid.to_s }
expose :name
expose(:lifecycle_stage) { |experiment| experiment.deleted_on? ? 'deleted' : 'active' }
expose(:artifact_location) { |experiment| 'not_implemented' }
end
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module API
module Entities
module Ml
module Mlflow
class GetExperiment < Grape::Entity
expose :itself, using: Experiment, as: :experiment
end
end
end
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module API
module Entities
module Ml
module Mlflow
class ListExperiment < Grape::Entity
expose :experiments, with: Experiment
end
end
end
end
end

View File

@ -10,7 +10,7 @@ module API
private
def run_info
::API::Entities::Ml::Mlflow::RunInfo.represent object
RunInfo.represent object
end
end
end

View File

@ -9,21 +9,29 @@ module API
include APIGuard
# The first part of the url is the namespace, the second part of the URL is what the MLFlow client calls
MLFLOW_API_PREFIX = ':id/ml/mflow/api/2.0/mlflow/'
MLFLOW_API_PREFIX = ':id/ml/mlflow/api/2.0/mlflow/'
allow_access_with_scope :api
allow_access_with_scope :read_api, if: -> (request) { request.get? || request.head? }
before do
authenticate!
not_found! unless Feature.enabled?(:ml_experiment_tracking, user_project)
end
feature_category :mlops
content_type :json, 'application/json'
default_format :json
before do
# MLFlow Client considers any status code different than 200 an error, even 201
status 200
authenticate!
not_found! unless Feature.enabled?(:ml_experiment_tracking, user_project)
end
rescue_from ActiveRecord::ActiveRecordError do |e|
invalid_parameter!(e.message)
end
helpers do
def resource_not_found!
render_structured_api_error!({ error_code: 'RESOURCE_DOES_NOT_EXIST' }, 404)
@ -32,6 +40,34 @@ module API
def resource_already_exists!
render_structured_api_error!({ error_code: 'RESOURCE_ALREADY_EXISTS' }, 400)
end
def invalid_parameter!(message = nil)
render_structured_api_error!({ error_code: 'INVALID_PARAMETER_VALUE', message: message }, 400)
end
def experiment_repository
::Ml::ExperimentTracking::ExperimentRepository.new(user_project, current_user)
end
def candidate_repository
::Ml::ExperimentTracking::CandidateRepository.new(user_project, current_user)
end
def experiment
@experiment ||= find_experiment!(params[:experiment_id], params[:experiment_name])
end
def candidate
@candidate ||= find_candidate!(params[:run_id])
end
def find_experiment!(iid, name)
experiment_repository.by_iid_or_name(iid: iid, name: name) || resource_not_found!
end
def find_candidate!(iid)
candidate_repository.by_iid(iid) || resource_not_found!
end
end
params do
@ -44,33 +80,35 @@ module API
namespace MLFLOW_API_PREFIX do
resource :experiments do
desc 'Fetch experiment by experiment_id' do
success Entities::Ml::Mlflow::Experiment
success Entities::Ml::Mlflow::GetExperiment
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#get-experiment'
end
params do
optional :experiment_id, type: String, default: '', desc: 'Experiment ID, in reference to the project'
end
get 'get', urgency: :low do
experiment = ::Ml::Experiment.by_project_id_and_iid(user_project.id, params[:experiment_id])
resource_not_found! unless experiment
present experiment, with: Entities::Ml::Mlflow::Experiment
present experiment, with: Entities::Ml::Mlflow::GetExperiment
end
desc 'Fetch experiment by experiment_name' do
success Entities::Ml::Mlflow::Experiment
success Entities::Ml::Mlflow::GetExperiment
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#get-experiment-by-name'
end
params do
optional :experiment_name, type: String, default: '', desc: 'Experiment name'
end
get 'get-by-name', urgency: :low do
experiment = ::Ml::Experiment.by_project_id_and_name(user_project, params[:experiment_name])
present experiment, with: Entities::Ml::Mlflow::GetExperiment
end
resource_not_found! unless experiment
desc 'List experiments' do
success Entities::Ml::Mlflow::ListExperiment
detail 'https://www.mlflow.org/docs/latest/rest-api.html#list-experiments'
end
get 'list', urgency: :low do
response = { experiments: experiment_repository.all }
present experiment, with: Entities::Ml::Mlflow::Experiment
present response, with: Entities::Ml::Mlflow::ListExperiment
end
desc 'Create experiment' do
@ -83,13 +121,9 @@ module API
optional :tags, type: Array, desc: 'This will be ignored'
end
post 'create', urgency: :low do
resource_already_exists! if ::Ml::Experiment.has_record?(user_project.id, params[:name])
experiment = ::Ml::Experiment.create!(name: params[:name],
user: current_user,
project: user_project)
present experiment, with: Entities::Ml::Mlflow::NewExperiment
present experiment_repository.create!(params[:name]), with: Entities::Ml::Mlflow::NewExperiment
rescue ActiveRecord::RecordInvalid
resource_already_exists!
end
end
@ -109,153 +143,108 @@ module API
optional :tags, type: Array, desc: 'This will be ignored'
end
post 'create', urgency: :low do
experiment = ::Ml::Experiment.by_project_id_and_iid(user_project.id, params[:experiment_id].to_i)
resource_not_found! unless experiment
candidate = experiment.candidates.create!(
user: current_user,
start_time: params[:start_time] || 0
)
present candidate_repository.create!(experiment, params[:start_time]), with: Entities::Ml::Mlflow::Run
end
desc 'Gets an MLFlow Run, which maps to GitLab Candidates' do
success Entities::Ml::Mlflow::Run
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#get-run'
end
params do
requires :run_id, type: String, desc: 'UUID of the candidate.'
optional :run_uuid, type: String, desc: 'This parameter is ignored'
end
get 'get', urgency: :low do
present candidate, with: Entities::Ml::Mlflow::Run
end
namespace do
after_validation do
@candidate = ::Ml::Candidate.with_project_id_and_iid(
user_project.id,
params[:run_id]
)
desc 'Updates a Run.' do
success Entities::Ml::Mlflow::UpdateRun
detail ['https://www.mlflow.org/docs/1.28.0/rest-api.html#update-run',
'MLFlow Runs map to GitLab Candidates']
end
params do
requires :run_id, type: String, desc: 'UUID of the candidate.'
optional :status, type: String,
values: ::Ml::Candidate.statuses.keys.map(&:upcase),
desc: "Status of the run. Accepts: " \
"#{::Ml::Candidate.statuses.keys.map(&:upcase)}."
optional :end_time, type: Integer, desc: 'Ending time of the run'
end
post 'update', urgency: :low do
candidate_repository.update(candidate, params[:status], params[:end_time])
resource_not_found! unless @candidate
end
present candidate, with: Entities::Ml::Mlflow::UpdateRun
end
desc 'Gets an MLFlow Run, which maps to GitLab Candidates' do
success Entities::Ml::Mlflow::Run
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#get-run'
end
params do
requires :run_id, type: String, desc: 'UUID of the candidate.'
optional :run_uuid, type: String, desc: 'This parameter is ignored'
end
get 'get', urgency: :low do
present @candidate, with: Entities::Ml::Mlflow::Run
end
desc 'Logs a metric to a run.' do
summary 'Log a metric for a run. A metric is a key-value pair (string key, float value) with an '\
'associated timestamp. Examples include the various metrics that represent ML model accuracy. '\
'A metric can be logged multiple times.'
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#log-metric'
end
params do
requires :run_id, type: String, desc: 'UUID of the run.'
requires :key, type: String, desc: 'Name for the metric.'
requires :value, type: Float, desc: 'Value of the metric.'
requires :timestamp, type: Integer, desc: 'Unix timestamp in milliseconds when metric was recorded'
optional :step, type: Integer, desc: 'Step at which the metric was recorded'
end
post 'log-metric', urgency: :low do
candidate_repository.add_metric!(
candidate,
params[:key],
params[:value],
params[:timestamp],
params[:step]
)
desc 'Updates a Run.' do
success Entities::Ml::Mlflow::UpdateRun
detail ['https://www.mlflow.org/docs/1.28.0/rest-api.html#update-run',
'MLFlow Runs map to GitLab Candidates']
end
params do
requires :run_id, type: String, desc: 'UUID of the candidate.'
optional :status, type: String,
values: ::Ml::Candidate.statuses.keys.map(&:upcase),
desc: "Status of the run. Accepts: " \
"#{::Ml::Candidate.statuses.keys.map(&:upcase)}."
optional :end_time, type: Integer, desc: 'Ending time of the run'
end
post 'update', urgency: :low do
@candidate.status = params[:status].downcase if params[:status]
@candidate.end_time = params[:end_time] if params[:end_time]
{}
end
@candidate.save
desc 'Logs a parameter to a run.' do
summary 'Log a param used for a run. A param is a key-value pair (string key, string value). '\
'Examples include hyperparameters used for ML model training and constant dates and values '\
'used in an ETL pipeline. A param can be logged only once for a run, duplicate will be .'\
'ignored'
present @candidate, with: Entities::Ml::Mlflow::UpdateRun
end
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#log-param'
end
params do
requires :run_id, type: String, desc: 'UUID of the run.'
requires :key, type: String, desc: 'Name for the parameter.'
requires :value, type: String, desc: 'Value for the parameter.'
end
post 'log-parameter', urgency: :low do
bad_request! unless candidate_repository.add_param!(candidate, params[:key], params[:value])
desc 'Logs a metric to a run.' do
summary 'Log a metric for a run. A metric is a key-value pair (string key, float value) with an '\
'associated timestamp. Examples include the various metrics that represent ML model accuracy. '\
'A metric can be logged multiple times.'
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#log-metric'
end
params do
requires :run_id, type: String, desc: 'UUID of the run.'
{}
end
desc 'Logs multiple parameters and metrics.' do
summary 'Log a batch of metrics and params for a run. Validation errors will block the entire batch, '\
'duplicate errors will be ignored.'
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#log-param'
end
params do
requires :run_id, type: String, desc: 'UUID of the run.'
optional :metrics, type: Array, default: [] do
requires :key, type: String, desc: 'Name for the metric.'
requires :value, type: Float, desc: 'Value of the metric.'
requires :timestamp, type: Integer, desc: 'Unix timestamp in milliseconds when metric was recorded'
optional :step, type: Integer, desc: 'Step at which the metric was recorded'
end
post 'log-metric', urgency: :low do
@candidate.metrics.create!(
name: params[:key],
value: params[:value],
tracked_at: params[:timestamp],
step: params[:step]
)
{}
optional :params, type: Array, default: [] do
requires :key, type: String, desc: 'Name for the metric.'
requires :value, type: String, desc: 'Value of the metric.'
end
end
post 'log-batch', urgency: :low do
candidate_repository.add_metrics(candidate, params[:metrics])
candidate_repository.add_params(candidate, params[:params])
desc 'Logs a parameter to a run.' do
summary 'Log a param used for a run. A param is a key-value pair (string key, string value). '\
'Examples include hyperparameters used for ML model training and constant dates and values '\
'used in an ETL pipeline. A param can be logged only once for a run, duplicate will be .'\
'ignored'
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#log-param'
end
params do
requires :run_id, type: String, desc: 'UUID of the run.'
requires :key, type: String, desc: 'Name for the parameter.'
requires :value, type: String, desc: 'Value for the parameter.'
end
post 'log-parameter', urgency: :low do
::Ml::CandidateParam.create(candidate: @candidate, name: params[:key], value: params[:value])
{}
end
desc 'Logs multiple parameters and metrics.' do
summary 'Log a batch of metrics and params for a run. Validation errors will block the entire batch, '\
'duplicate errors will be ignored.'
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#log-param'
end
params do
requires :run_id, type: String, desc: 'UUID of the run.'
optional :metrics, type: Array, default: [] do
requires :key, type: String, desc: 'Name for the metric.'
requires :value, type: Float, desc: 'Value of the metric.'
requires :timestamp, type: Integer, desc: 'Unix timestamp in milliseconds when metric was recorded'
optional :step, type: Integer, desc: 'Step at which the metric was recorded'
end
optional :params, type: Array, default: [] do
requires :key, type: String, desc: 'Name for the metric.'
requires :value, type: String, desc: 'Value of the metric.'
end
end
post 'log-batch', urgency: :low do
times = { created_at: Time.zone.now, updated_at: Time.zone.now }
metrics = params[:metrics].map do |metric|
{
candidate_id: @candidate.id,
name: metric[:key],
value: metric[:value],
tracked_at: metric[:timestamp],
step: metric[:step],
**times
}
end
::Ml::CandidateMetric.insert_all(metrics, returning: false) unless metrics.empty?
parameters = params[:params].map do |p|
{
candidate_id: @candidate.id,
name: p[:key],
value: p[:value],
**times
}
end
::Ml::CandidateParam.insert_all(parameters, returning: false) unless parameters.empty?
{}
end
{}
end
end
end

View File

@ -121,11 +121,7 @@ module Gitlab
end
def observe_jobs_count_in_alive_pipelines
jobs_count = if limit_active_jobs_early?
project.all_pipelines.jobs_count_in_alive_pipelines
else
project.all_pipelines.builds_count_in_alive_pipelines
end
jobs_count = project.all_pipelines.jobs_count_in_alive_pipelines
metrics.active_jobs_histogram
.observe({ plan: project.actual_plan_name }, jobs_count)
@ -136,12 +132,6 @@ module Gitlab
.increment(reason: (reason || :unknown_failure).to_s)
end
def limit_active_jobs_early?
strong_memoize(:limit_active_jobs_early) do
Feature.enabled?(:ci_limit_active_jobs_early, project)
end
end
private
# Verifies that origin_ref is a fully qualified tag reference (refs/tags/<tag-name>)

View File

@ -13,7 +13,6 @@ module Gitlab
MESSAGE = "Project exceeded the allowed number of jobs in active pipelines. Retry later."
def perform!
return unless command.limit_active_jobs_early?
return unless limits.exceeded?(LIMIT_NAME, count_jobs_in_alive_pipelines)
error(MESSAGE, drop_reason: :job_activity_limit_exceeded)
@ -26,8 +25,6 @@ module Gitlab
end
def break?
return unless command.limit_active_jobs_early?
pipeline.errors.any?
end

View File

@ -1,23 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Limit
class JobActivity < Chain::Base
def perform!
# to be overridden in EE
end
def break?
false # to be overridden in EE
end
end
end
end
end
end
end
Gitlab::Ci::Pipeline::Chain::Limit::JobActivity.prepend_mod_with('Gitlab::Ci::Pipeline::Chain::Limit::JobActivity')

View File

@ -43,6 +43,9 @@ module Gitlab
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectPathChangedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectArchivedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectTransferedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker,
to: ::Projects::ProjectAttributesChangedEvent,
if: -> (event) { event.pages_related? }
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupTransferedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupPathChangedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupDeletedEvent

View File

@ -51,6 +51,7 @@ module Gitlab
def update_project_settings
update_setting_for_only_allow_merge_if_all_discussions_are_resolved
update_project_push_rule
end
def update_setting_for_only_allow_merge_if_all_discussions_are_resolved
@ -58,6 +59,15 @@ module Gitlab
project.update(only_allow_merge_if_all_discussions_are_resolved: true)
end
def update_project_push_rule
return unless project.licensed_feature_available?(:push_rules)
return unless protected_branch.required_signatures
push_rule = project.push_rule || project.build_push_rule
push_rule.update!(reject_unsigned_commits: true)
project.project_setting.update!(push_rule_id: push_rule.id)
end
end
end
end

View File

@ -9,7 +9,7 @@ module Gitlab
attr_reader :attributes
expose_attribute :id, :allow_force_pushes, :required_conversation_resolution
expose_attribute :id, :allow_force_pushes, :required_conversation_resolution, :required_signatures
# Builds a Branch Protection info from a GitHub API response.
# Resource structure details:
@ -21,7 +21,8 @@ module Gitlab
hash = {
id: branch_name,
allow_force_pushes: branch_protection.dig(:allow_force_pushes, :enabled),
required_conversation_resolution: branch_protection.dig(:required_conversation_resolution, :enabled)
required_conversation_resolution: branch_protection.dig(:required_conversation_resolution, :enabled),
required_signatures: branch_protection.dig(:required_signatures, :enabled)
}
new(hash)

View File

@ -14108,6 +14108,9 @@ msgstr ""
msgid "Do not force push over diverged refs. After the mirror is created, this setting can only be modified using the API. %{mirroring_docs_link_start}Learn more about this option%{link_closing_tag} and %{mirroring_api_docs_link_start}the API.%{link_closing_tag}"
msgstr ""
msgid "Do not show again"
msgstr ""
msgid "Do you want to remove this deploy key?"
msgstr ""
@ -36460,9 +36463,15 @@ msgstr ""
msgid "SecurityReports|scanned resources"
msgstr ""
msgid "SecurityTraining|Enable security training to learn how to fix vulnerabilities. View security training from selected educational providers relevant to the detected vulnerability."
msgstr ""
msgid "SecurityTraining|Primary Training"
msgstr ""
msgid "SecurityTraining|Resolve with security training"
msgstr ""
msgid "SecurityTraining|Training from this partner takes precedence when more than one training partner is enabled."
msgstr ""

View File

@ -6,18 +6,31 @@
"properties": {
"experiment": {
"type": "object",
"required" : [
"required": [
"experiment_id",
"name",
"artifact_location",
"lifecycle_stage"
],
"properties" : {
"experiment_id": { "type": "string" },
"name": { "type": "string" },
"artifact_location": { "type": "string" },
"lifecycle_stage": { "type": { "enum" : ["active", "deleted"] } }
"properties": {
"experiment_id": {
"type": "string"
},
"name": {
"type": "string"
},
"artifact_location": {
"type": "string"
},
"lifecycle_stage": {
"type": {
"enum": [
"active",
"deleted"
]
}
}
}
}
}
}
}

View File

@ -0,0 +1,39 @@
{
"type": "object",
"required": [
"experiments"
],
"properties": {
"experiments": {
"type": "array",
"items": {
"type": "object",
"required": [
"experiment_id",
"name",
"artifact_location",
"lifecycle_stage"
],
"properties": {
"experiment_id": {
"type": "string"
},
"name": {
"type": "string"
},
"artifact_location": {
"type": "string"
},
"lifecycle_stage": {
"type": {
"enum": [
"active",
"deleted"
]
}
}
}
}
}
}
}

View File

@ -53,6 +53,7 @@ describe('Output component', () => {
expect(iframe.exists()).toBe(true);
expect(iframe.element.getAttribute('sandbox')).toBe('');
expect(iframe.element.getAttribute('srcdoc')).toBe('<p>test</p>');
expect(iframe.element.getAttribute('scrolling')).toBe('auto');
});
it('renders multiple raw HTML outputs', () => {

View File

@ -13,7 +13,6 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::ActiveJobs do
::Gitlab::Ci::Pipeline::Chain::Command,
project: project,
current_user: user,
limit_active_jobs_early?: feature_flag_enabled,
save_incompleted: true,
pipeline_seed: pipeline_seed_double
)
@ -29,7 +28,6 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::ActiveJobs do
let(:existing_pipeline) { create(:ci_pipeline, project: project) }
let(:step) { described_class.new(pipeline, command) }
let(:feature_flag_enabled) { true }
let(:limit) { 10 }
subject { step.perform! }

View File

@ -100,19 +100,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
expect(histogram).to have_received(:observe)
.with(hash_including(plan: project.actual_plan_name), 4)
end
context 'when feature flag ci_limit_active_jobs_early is disabled' do
before do
stub_feature_flags(ci_limit_active_jobs_early: false)
end
it 'counts all the active builds' do
subject.build!
expect(histogram).to have_received(:observe)
.with(hash_including(plan: project.actual_plan_name), 3)
end
end
end
end
end

View File

@ -7,12 +7,14 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
let(:branch_name) { 'protection' }
let(:allow_force_pushes_on_github) { true }
let(:required_conversation_resolution) { true }
let(:required_conversation_resolution) { false }
let(:required_signatures) { false }
let(:github_protected_branch) do
Gitlab::GithubImport::Representation::ProtectedBranch.new(
id: branch_name,
allow_force_pushes: allow_force_pushes_on_github,
required_conversation_resolution: required_conversation_resolution
required_conversation_resolution: required_conversation_resolution,
required_signatures: required_signatures
)
end
@ -54,6 +56,12 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
it 'does not change only_allow_merge_if_all_discussions_are_resolved' do
expect { importer.execute }.not_to change(project, :only_allow_merge_if_all_discussions_are_resolved)
end
it 'does not change push_rule for the project' do
expect(project).not_to receive(:push_rule)
importer.execute
end
end
context 'when branch is protected on GitLab' do
@ -115,6 +123,46 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
it_behaves_like 'does not change project attributes'
end
context 'when required_signatures rule is enabled' do
let(:required_signatures) { true }
let(:push_rules_feature_available?) { true }
before do
stub_licensed_features(push_rules: push_rules_feature_available?)
end
context 'when the push_rules feature is available', if: Gitlab.ee? do
context 'when project push_rules did previously exist' do
before do
create(:push_rule, project: project)
end
it 'updates push_rule reject_unsigned_commits attribute' do
expect { importer.execute }.to change { project.reload.push_rule.reject_unsigned_commits }.to(true)
end
end
context 'when project push_rules did not previously exist' do
it 'creates project push_rule with the enabled reject_unsigned_commits attribute' do
expect { importer.execute }.to change(project, :push_rule).from(nil)
expect(project.push_rule.reject_unsigned_commits).to be_truthy
end
end
end
context 'when the push_rules feature is not available' do
let(:push_rules_feature_available?) { false }
it_behaves_like 'does not change project attributes'
end
end
context 'when required_signatures rule is disabled' do
let(:required_signatures) { false }
it_behaves_like 'does not change project attributes'
end
end
context "when branch is not default" do
@ -129,6 +177,18 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
it_behaves_like 'does not change project attributes'
end
context 'when required_signatures rule is enabled' do
let(:required_signatures) { true }
it_behaves_like 'does not change project attributes'
end
context 'when required_signatures rule is disabled' do
let(:required_signatures) { false }
it_behaves_like 'does not change project attributes'
end
end
end
end

View File

@ -25,7 +25,10 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
describe '.from_api_response' do
let(:response) do
response = Struct.new(:url, :allow_force_pushes, :required_conversation_resolution, keyword_init: true)
response = Struct.new(
:url, :allow_force_pushes, :required_conversation_resolution, :required_signatures,
keyword_init: true
)
enabled_setting = Struct.new(:enabled, keyword_init: true)
response.new(
url: 'https://example.com/branches/main/protection',
@ -34,6 +37,9 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
),
required_conversation_resolution: enabled_setting.new(
enabled: true
),
required_signatures: enabled_setting.new(
enabled: true
)
)
end
@ -49,7 +55,8 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
{
'id' => 'main',
'allow_force_pushes' => true,
'required_conversation_resolution' => true
'required_conversation_resolution' => true,
'required_signatures' => true
}
end

View File

@ -7,6 +7,7 @@ RSpec.describe Ci::JobToken::ProjectScopeLink do
it { is_expected.to belong_to(:target_project) }
it { is_expected.to belong_to(:added_by) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
it_behaves_like 'cleanup by a loose foreign key' do
@ -97,14 +98,14 @@ RSpec.describe Ci::JobToken::ProjectScopeLink do
context 'loose foreign key on ci_job_token_project_scope_links.source_project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
let!(:parent) { create(:project, namespace: group) }
let!(:model) { create(:ci_job_token_project_scope_link, source_project: parent) }
end
end
context 'loose foreign key on ci_job_token_project_scope_links.target_project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
let!(:parent) { create(:project, namespace: group) }
let!(:model) { create(:ci_job_token_project_scope_link, target_project: parent) }
end
end

View File

@ -3,16 +3,19 @@
require 'spec_helper'
RSpec.describe Ml::Experiment do
let_it_be(:exp) { create(:ml_experiments) }
let_it_be(:exp2) { create(:ml_experiments, project: exp.project) }
let(:iid) { exp.iid }
let(:exp_name) { exp.name }
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:user) }
it { is_expected.to have_many(:candidates) }
end
describe '#by_project_id_and_iid?' do
let(:exp) { create(:ml_experiments) }
let(:iid) { exp.iid }
describe '#by_project_id_and_iid' do
subject { described_class.by_project_id_and_iid(exp.project_id, iid) }
context 'if exists' do
@ -26,10 +29,7 @@ RSpec.describe Ml::Experiment do
end
end
describe '#by_project_id_and_name?' do
let(:exp) { create(:ml_experiments) }
let(:exp_name) { exp.name }
describe '#by_project_id_and_name' do
subject { described_class.by_project_id_and_name(exp.project_id, exp_name) }
context 'if exists' do
@ -43,20 +43,17 @@ RSpec.describe Ml::Experiment do
end
end
describe '#has_record?' do
let(:exp) { create(:ml_experiments) }
let(:exp_name) { exp.name }
describe '#by_project_id' do
let(:project_id) { exp.project_id }
subject { described_class.has_record?(exp.project_id, exp_name) }
subject { described_class.by_project_id(project_id) }
context 'if exists' do
it { is_expected.to be_truthy }
end
it { is_expected.to match_array([exp, exp2]) }
context 'if does not exist' do
let(:exp_name) { 'hello' }
context 'when project does not have experiment' do
let(:project_id) { non_existing_record_iid }
it { is_expected.to be_falsey }
it { is_expected.to be_empty }
end
end
end

View File

@ -139,9 +139,9 @@ RSpec.describe API::Ml::Mlflow do
end
end
describe 'GET /projects/:id/ml/mflow/api/2.0/mlflow/experiments/get' do
describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/get' do
let(:experiment_iid) { experiment.iid.to_s }
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/experiments/get?experiment_id=#{experiment_iid}" }
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get?experiment_id=#{experiment_iid}" }
it 'returns the experiment', :aggregate_failures do
expect(response).to have_gitlab_http_status(:ok)
@ -165,7 +165,7 @@ RSpec.describe API::Ml::Mlflow do
end
context 'and experiment_id is not passed' do
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/experiments/get" }
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get" }
it_behaves_like 'Not Found - Resource Does Not Exist'
end
@ -176,10 +176,40 @@ RSpec.describe API::Ml::Mlflow do
end
end
describe 'GET /projects/:id/ml/mflow/api/2.0/mlflow/experiments/get-by-name' do
describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/list' do
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/list" }
it 'returns the experiments' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/list_experiments')
expect(json_response).to include({
'experiments' => [
'experiment_id' => experiment.iid.to_s,
'name' => experiment.name,
'lifecycle_stage' => 'active',
'artifact_location' => 'not_implemented'
]
})
end
context 'when there are no experiments' do
let(:project_id) { another_project.id }
it 'returns an empty list' do
expect(json_response).to include({ 'experiments' => [] })
end
end
describe 'Error States' do
it_behaves_like 'shared error cases'
it_behaves_like 'Requires read_api scope'
end
end
describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/get-by-name' do
let(:experiment_name) { experiment.name }
let(:route) do
"/projects/#{project_id}/ml/mflow/api/2.0/mlflow/experiments/get-by-name?experiment_name=#{experiment_name}"
"/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get-by-name?experiment_name=#{experiment_name}"
end
it 'returns the experiment', :aggregate_failures do
@ -203,7 +233,7 @@ RSpec.describe API::Ml::Mlflow do
end
context 'when has access but experiment_name is not passed' do
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/experiments/get-by-name" }
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get-by-name" }
it_behaves_like 'Not Found - Resource Does Not Exist'
end
@ -213,16 +243,16 @@ RSpec.describe API::Ml::Mlflow do
end
end
describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/experiments/create' do
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/create' do
let(:route) do
"/projects/#{project_id}/ml/mflow/api/2.0/mlflow/experiments/create"
"/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/create"
end
let(:params) { { name: 'new_experiment' } }
let(:request) { post api(route), params: params, headers: headers }
it 'creates the experiment', :aggregate_failures do
expect(response).to have_gitlab_http_status(:created)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('experiment_id' )
end
@ -244,7 +274,7 @@ RSpec.describe API::Ml::Mlflow do
end
context 'when project does not exist' do
let(:route) { "/projects/#{non_existing_record_id}/ml/mflow/api/2.0/mlflow/experiments/create" }
let(:route) { "/projects/#{non_existing_record_id}/ml/mlflow/api/2.0/mlflow/experiments/create" }
it_behaves_like 'Not Found', '404 Project Not Found'
end
@ -255,8 +285,8 @@ RSpec.describe API::Ml::Mlflow do
end
describe 'Runs' do
describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/runs/create' do
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/runs/create" }
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/create' do
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/create" }
let(:params) { { experiment_id: experiment.iid.to_s, start_time: Time.now.to_i } }
let(:request) { post api(route), params: params, headers: headers }
@ -270,7 +300,7 @@ RSpec.describe API::Ml::Mlflow do
'lifecycle_stage' => "active"
}
expect(response).to have_gitlab_http_status(:created)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/run')
expect(json_response['run']).to include('info' => hash_including(**expected_properties),
'data' => { 'metrics' => [], 'params' => [] })
@ -300,8 +330,8 @@ RSpec.describe API::Ml::Mlflow do
end
end
describe 'GET /projects/:id/ml/mflow/api/2.0/mlflow/runs/get' do
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/runs/get" }
describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/runs/get' do
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/get" }
let(:default_params) { { 'run_id' => candidate.iid } }
it 'gets the run', :aggregate_failures do
@ -314,7 +344,7 @@ RSpec.describe API::Ml::Mlflow do
'lifecycle_stage' => "active"
}
expect(response).to have_gitlab_http_status(:success)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/run')
expect(json_response['run']).to include(
'info' => hash_including(**expected_properties),
@ -337,10 +367,10 @@ RSpec.describe API::Ml::Mlflow do
end
end
describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/runs/update' do
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/update' do
let(:default_params) { { run_id: candidate.iid.to_s, status: 'FAILED', end_time: Time.now.to_i } }
let(:request) { post api(route), params: params, headers: headers }
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/runs/update" }
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/update" }
it 'updates the run', :aggregate_failures do
expected_properties = {
@ -353,7 +383,7 @@ RSpec.describe API::Ml::Mlflow do
'lifecycle_stage' => 'active'
}
expect(response).to have_gitlab_http_status(:success)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/update_run')
expect(json_response).to include('run_info' => hash_including(**expected_properties))
end
@ -377,15 +407,15 @@ RSpec.describe API::Ml::Mlflow do
end
end
describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/runs/log-metric' do
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/runs/log-metric" }
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-metric' do
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-metric" }
let(:default_params) { { run_id: candidate.iid.to_s, key: 'some_key', value: 10.0, timestamp: Time.now.to_i } }
let(:request) { post api(route), params: params, headers: headers }
it 'logs the metric', :aggregate_failures do
candidate.metrics.reload
expect(response).to have_gitlab_http_status(:success)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
expect(candidate.metrics.length).to eq(3)
end
@ -398,32 +428,26 @@ RSpec.describe API::Ml::Mlflow do
end
end
describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/runs/log-parameter' do
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/runs/log-parameter" }
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-parameter' do
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-parameter" }
let(:default_params) { { run_id: candidate.iid.to_s, key: 'some_key', value: 'value' } }
let(:request) { post api(route), params: params, headers: headers }
it 'logs the parameter', :aggregate_failures do
candidate.params.reload
expect(response).to have_gitlab_http_status(:success)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
expect(candidate.params.length).to eq(3)
end
context 'when parameter was already logged' do
let(:params) { default_params.tap { |p| p[:key] = candidate.params[0].name } }
it 'does not log', :aggregate_failures do
candidate.params.reload
expect(response).to have_gitlab_http_status(:success)
expect(json_response).to be_empty
expect(candidate.params.length).to eq(2)
end
end
describe 'Error Cases' do
context 'when parameter was already logged' do
let(:params) { default_params.tap { |p| p[:key] = candidate.params[0].name } }
it_behaves_like 'Bad Request'
end
it_behaves_like 'shared error cases'
it_behaves_like 'Requires api scope'
it_behaves_like 'run_id param error cases'
@ -431,12 +455,12 @@ RSpec.describe API::Ml::Mlflow do
end
end
describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/runs/log-batch' do
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-batch' do
let(:candidate2) do
create(:ml_candidates, user: experiment.user, start_time: 1234, experiment: experiment)
end
let(:route) { "/projects/#{project_id}/ml/mflow/api/2.0/mlflow/runs/log-batch" }
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-batch" }
let(:default_params) do
{
run_id: candidate2.iid.to_s,
@ -451,7 +475,7 @@ RSpec.describe API::Ml::Mlflow do
let(:request) { post api(route), params: params, headers: headers }
it 'logs parameters and metrics', :aggregate_failures do
expect(response).to have_gitlab_http_status(:success)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
expect(candidate2.params.size).to eq(1)
expect(candidate2.metrics.size).to eq(2)
@ -465,7 +489,7 @@ RSpec.describe API::Ml::Mlflow do
it 'does not log', :aggregate_failures do
candidate.params.reload
expect(response).to have_gitlab_http_status(:success)
expect(response).to have_gitlab_http_status(:ok)
expect(candidate2.params.size).to eq(1)
end
end

View File

@ -0,0 +1,199 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) }
let_it_be(:candidate) { create(:ml_candidates, user: user, experiment: experiment) }
let(:repository) { described_class.new(project, user) }
describe '#by_iid' do
let(:iid) { candidate.iid }
subject { repository.by_iid(iid) }
it { is_expected.to eq(candidate) }
context 'when iid does not exist' do
let(:iid) { non_existing_record_iid.to_s }
it { is_expected.to be_nil }
end
context 'when iid belongs to a different project' do
let(:repository) { described_class.new(create(:project), user) }
it { is_expected.to be_nil }
end
end
describe '#create!' do
subject { repository.create!(experiment, 1234) }
it 'creates the candidate' do
expect(subject.start_time).to eq(1234)
expect(subject.iid).not_to be_nil
expect(subject.end_time).to be_nil
end
end
describe '#update' do
let(:end_time) { 123456 }
let(:status) { 'running' }
subject { repository.update(candidate, status, end_time) }
it { is_expected.to be_truthy }
context 'when end_time is missing ' do
let(:end_time) { nil }
it { is_expected.to be_truthy }
end
context 'when status is wrong' do
let(:status) { 's' }
it 'fails assigning the value' do
expect { subject }.to raise_error(ArgumentError)
end
end
context 'when status is missing' do
let(:status) { nil }
it { is_expected.to be_truthy }
end
end
describe '#add_metric!' do
let(:props) { { name: 'abc', value: 1234, tracked: 12345678, step: 0 } }
let(:metrics_before) { candidate.metrics.size }
before do
metrics_before
end
subject { repository.add_metric!(candidate, props[:name], props[:value], props[:tracked], props[:step]) }
it 'adds a new metric' do
expect { subject }.to change { candidate.metrics.size }.by(1)
end
context 'when name missing' do
let(:props) { { value: 1234, tracked: 12345678, step: 0 } }
it 'does not add metric' do
expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
end
end
end
describe '#add_param!' do
let(:props) { { name: 'abc', value: 'def' } }
subject { repository.add_param!(candidate, props[:name], props[:value]) }
it 'adds a new param' do
expect { subject }.to change { candidate.params.size }.by(1)
end
context 'when name missing' do
let(:props) { { value: 1234 } }
it 'throws RecordInvalid' do
expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
end
end
context 'when param was already added' do
it 'throws RecordInvalid' do
repository.add_param!(candidate, 'new', props[:value])
expect { repository.add_param!(candidate, 'new', props[:value]) }.to raise_error(ActiveRecord::RecordInvalid)
end
end
end
describe "#add_params" do
let(:params) do
[{ key: 'model_class', value: 'LogisticRegression' }, { 'key': 'pythonEnv', value: '3.10' }]
end
subject { repository.add_params(candidate, params) }
it 'adds the parameters' do
expect { subject }.to change { candidate.reload.params.size }.by(2)
end
context 'if parameter misses key' do
let(:params) do
[{ value: 'LogisticRegression' }]
end
it 'does not throw and does not add' do
expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
end
end
context 'if parameter misses value' do
let(:params) do
[{ key: 'pythonEnv2' }]
end
it 'does not throw and does not add' do
expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
end
end
context 'if parameter repeated do' do
let(:params) do
[
{ 'key': 'pythonEnv0', value: '2.7' },
{ 'key': 'pythonEnv1', value: '3.9' },
{ 'key': 'pythonEnv1', value: '3.10' }
]
end
before do
repository.add_param!(candidate, 'pythonEnv0', '0')
end
it 'does not throw and adds only the first of each kind' do
expect { subject }.to change { candidate.reload.params.size }.by(1)
end
end
end
describe "#add_metrics" do
let(:metrics) do
[
{ key: 'mae', value: 2.5, timestamp: 1552550804 },
{ key: 'rmse', value: 2.7, timestamp: 1552550804 }
]
end
subject { repository.add_metrics(candidate, metrics) }
it 'adds the metrics' do
expect { subject }.to change { candidate.reload.metrics.size }.by(2)
end
context 'when metrics have repeated keys' do
let(:metrics) do
[
{ key: 'mae', value: 2.5, timestamp: 1552550804 },
{ key: 'rmse', value: 2.7, timestamp: 1552550804 },
{ key: 'mae', value: 2.7, timestamp: 1552550805 }
]
end
it 'adds all of them' do
expect { subject }.to change { candidate.reload.metrics.size }.by(3)
end
end
end
end

View File

@ -0,0 +1,85 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) }
let_it_be(:experiment2) { create(:ml_experiments, user: user, project: project) }
let_it_be(:experiment3) { create(:ml_experiments, user: user, project: project) }
let_it_be(:experiment4) { create(:ml_experiments, user: user) }
let(:repository) { described_class.new(project, user) }
describe '#by_iid_or_name' do
let(:iid) { experiment.iid }
let(:name) { nil }
subject { repository.by_iid_or_name(iid: iid, name: name) }
context 'when iid passed' do
it('fetches the experiment') { is_expected.to eq(experiment) }
context 'and name passed' do
let(:name) { experiment2.name }
it('ignores the name') { is_expected.to eq(experiment) }
end
context 'and does not exist' do
let(:iid) { non_existing_record_iid }
it { is_expected.to eq(nil) }
end
end
context 'when iid is not passed', 'and name is passed' do
let(:iid) { nil }
context 'when name exists' do
let(:name) { experiment2.name }
it('fetches the experiment') { is_expected.to eq(experiment2) }
end
context 'when name does not exist' do
let(:name) { non_existing_record_iid }
it { is_expected.to eq(nil) }
end
end
end
describe '#all' do
it 'fetches experiments for project' do
expect(repository.all).to match_array([experiment, experiment2, experiment3])
end
end
describe '#create!' do
let(:name) { 'hello' }
subject { repository.create!(name) }
it 'creates the candidate' do
expect { subject }.to change { repository.all.size }.by(1)
end
context 'when name exists' do
let(:name) { experiment.name }
it 'throws error' do
expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
end
end
context 'when name is missing' do
let(:name) { nil }
it 'throws error' do
expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
end
end
end
end

View File

@ -165,6 +165,38 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
{ type: :namespace, id: 3 }
]
context 'when project attributes change' do
Projects::ProjectAttributesChangedEvent::PAGES_RELATED_ATTRIBUTES.each do |attribute|
it_behaves_like 'clears caches with',
event_class: Projects::ProjectAttributesChangedEvent,
event_data: {
project_id: 1,
namespace_id: 2,
root_namespace_id: 3,
attributes: [attribute]
},
caches: [
{ type: :project, id: 1 },
{ type: :namespace, id: 3 }
]
end
it 'does not clear the cache when the attributes is not pages related' do
event = Projects::ProjectAttributesChangedEvent.new(
data: {
project_id: 1,
namespace_id: 2,
root_namespace_id: 3,
attributes: ['unknown']
}
)
expect(described_class).not_to receive(:clear_cache)
::Gitlab::EventStore.publish(event)
end
end
context 'when namespace based cache keys are duplicated' do
# de-dups namespace cache keys
it_behaves_like 'clears caches with',

View File

@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe RunPipelineScheduleWorker do
describe '#perform' do
let_it_be(:project) { create(:project) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
let_it_be(:user) { create(:user) }
let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project ) }