Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-04-14 15:15:22 +00:00
parent 8a5138ed7d
commit cdb41961fd
148 changed files with 2736 additions and 1196 deletions

View File

@ -433,11 +433,19 @@ lib/gitlab/checks/** @proglottis @toon
/doc/administration/logs/index.md @msedlakjakubowski
/doc/administration/maintenance_mode/ @axil
/doc/administration/merge_request_diffs.md @aqualls
/doc/administration/monitoring/ @msedlakjakubowski
/doc/administration/monitoring/github_imports.md @eread
/doc/administration/monitoring/gitlab_self_monitoring_project/ @msedlakjakubowski
/doc/administration/monitoring/index.md @msedlakjakubowski
/doc/administration/monitoring/ip_allowlist.md @jglassman1
/doc/administration/monitoring/performance/gitlab_configuration.md @msedlakjakubowski
/doc/administration/monitoring/performance/grafana_configuration.md @msedlakjakubowski
/doc/administration/monitoring/performance/index.md @jglassman1
/doc/administration/monitoring/performance/performance_bar.md @jglassman1
/doc/administration/monitoring/prometheus/gitlab_exporter.md @jglassman1
/doc/administration/monitoring/prometheus/index.md @axil
/doc/administration/monitoring/prometheus/pgbouncer_exporter.md @aqualls
/doc/administration/monitoring/prometheus/postgres_exporter.md @aqualls
/doc/administration/monitoring/prometheus/registry_exporter.md @marcel.amirault
/doc/administration/monitoring/prometheus/web_exporter.md @jglassman1
/doc/administration/nfs.md @axil
/doc/administration/object_storage.md @axil
@ -465,6 +473,7 @@ lib/gitlab/checks/** @proglottis @toon
/doc/administration/server_hooks.md @eread
/doc/administration/sidekiq/ @axil
/doc/administration/sidekiq/sidekiq_memory_killer.md @jglassman1
/doc/administration/silent_mode/ @axil
/doc/administration/smime_signing_email.md @axil
/doc/administration/snippets/ @aqualls
/doc/administration/static_objects_external_storage.md @ashrafkhamis
@ -655,7 +664,6 @@ lib/gitlab/checks/** @proglottis @toon
/doc/ci/pipeline_editor/ @marcel.amirault
/doc/ci/pipelines/downstream_pipelines.md @marcel.amirault
/doc/ci/pipelines/index.md @marcel.amirault
/doc/ci/pipelines/job_artifacts.md @marcel.amirault
/doc/ci/pipelines/pipeline_architectures.md @marcel.amirault
/doc/ci/pipelines/pipeline_artifacts.md @marcel.amirault
/doc/ci/quick_start/ @marcel.amirault
@ -795,6 +803,7 @@ lib/gitlab/checks/** @proglottis @toon
/doc/raketasks/spdx.md @rdickenson
/doc/raketasks/x509_signatures.md @aqualls
/doc/security/ @jglassman1
/doc/security/email_verification.md @phillipwells
/doc/subscriptions/ @fneill
/doc/subscriptions/gitlab_dedicated/ @drcatherinepope
/doc/topics/authentication/ @jglassman1
@ -806,6 +815,9 @@ lib/gitlab/checks/** @proglottis @toon
/doc/topics/plan_and_track.md @msedlakjakubowski
/doc/topics/your_work.md @sselhorn
/doc/tutorials/ @kpaizee
/doc/tutorials/create_compliance_pipeline.md @eread
/doc/tutorials/fuzz_testing_tutorial.md @rdickenson
/doc/tutorials/scan_result_policy.md @dianalogan
/doc/update/ @axil
/doc/update/background_migrations.md @aqualls
/doc/user/admin_area/analytics/ @lciutacu
@ -844,6 +856,7 @@ lib/gitlab/checks/** @proglottis @toon
/doc/user/admin_area/settings/rate_limit_on_projects_api.md @lciutacu
/doc/user/admin_area/settings/rate_limit_on_users_api.md @jglassman1
/doc/user/admin_area/settings/scim_setup.md @jglassman1
/doc/user/admin_area/settings/security_and_compliance.md @rdickenson
/doc/user/admin_area/settings/terraform_limits.md @phillipwells
/doc/user/admin_area/settings/third_party_offers.md @lciutacu
/doc/user/admin_area/settings/usage_statistics.md @lciutacu
@ -879,6 +892,7 @@ lib/gitlab/checks/** @proglottis @toon
/doc/user/group/issues_analytics/ @msedlakjakubowski
/doc/user/group/iterations/ @msedlakjakubowski
/doc/user/group/manage.md @lciutacu
/doc/user/group/moderate_users.md @phillipwells
/doc/user/group/planning_hierarchy/ @msedlakjakubowski
/doc/user/group/reporting/ @phillipwells
/doc/user/group/repositories_analytics/ @drcatherinepope
@ -910,7 +924,7 @@ lib/gitlab/checks/** @proglottis @toon
/doc/user/project/changelogs.md @aqualls
/doc/user/project/clusters/ @phillipwells
/doc/user/project/code_intelligence.md @aqualls
/doc/user/project/code_owners.md @aqualls
/doc/user/project/codeowners/ @aqualls
/doc/user/project/deploy_boards.md @phillipwells
/doc/user/project/deploy_keys/ @phillipwells
/doc/user/project/deploy_tokens/ @phillipwells
@ -953,6 +967,7 @@ lib/gitlab/checks/** @proglottis @toon
/doc/user/project/settings/import_export_troubleshooting.md @eread
/doc/user/project/settings/index.md @lciutacu
/doc/user/project/settings/project_access_tokens.md @jglassman1
/doc/user/project/system_notes.md @aqualls
/doc/user/project/time_tracking.md @msedlakjakubowski
/doc/user/project/web_ide/ @ashrafkhamis
/doc/user/project/web_ide_beta/ @ashrafkhamis
@ -967,6 +982,7 @@ lib/gitlab/checks/** @proglottis @toon
/doc/user/tasks.md @msedlakjakubowski
/doc/user/todos.md @msedlakjakubowski
/doc/user/usage_quotas.md @fneill
/doc/user/workspace/quick_start/ @ashrafkhamis
# End rake-managed-docs-block
[Authentication and Authorization]

View File

@ -133,7 +133,9 @@ retrieve-frontend-fixtures:
stage: fixtures
needs: ["setup-test-env", "retrieve-tests-metadata", "retrieve-frontend-fixtures"]
variables:
CRYSTALBALL: "false"
# Don't add `CRYSTALBALL: "false"` here as we're enabling Crystalball for scheduled pipelines (in `.gitlab-ci.yml`), so that we get coverage data
# for the `frontend fixture RSpec files` that will be added to the Crystalball mapping in `update-tests-metadata`.
# More information in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74003.
WEBPACK_VENDOR_DLL: "true"
script:
- source scripts/utils.sh
@ -181,6 +183,7 @@ rspec-all frontend_fixture as-if-foss:
- .frontend:rules:frontend_fixture-as-if-foss
- .as-if-foss
variables:
# We explicitly disable Crystalball here, as even in scheduled pipelines we don't need it: it's already enabled for `rspec-all frontend_fixture` there.
CRYSTALBALL: "false"
WEBPACK_VENDOR_DLL: "true"
KNAPSACK_GENERATE_REPORT: ""

View File

@ -63,7 +63,6 @@ stages:
QA_INTERCEPT_REQUESTS: "true"
GITLAB_LICENSE_MODE: test
GITLAB_QA_ADMIN_ACCESS_TOKEN: $QA_ADMIN_ACCESS_TOKEN
GITLAB_QA_OPTS: $EXTRA_GITLAB_QA_OPTS
# TODO: remove in the 16.1 milestone when no longer needed for backwards compatibility
EE_LICENSE: $QA_EE_LICENSE
GITHUB_ACCESS_TOKEN: $QA_GITHUB_ACCESS_TOKEN
@ -234,6 +233,28 @@ _quarantine:
variables:
QA_RSPEC_TAGS: --tag quarantine
# Temporary test job to support the effort of migrating to Super Sidebar
# https://gitlab.com/groups/gitlab-org/-/epics/9044
_super-sidebar-nav:
extends:
- .qa
- .parallel
variables:
QA_SCENARIO: Test::Instance::Image
QA_KNAPSACK_REPORT_NAME: ee-instance
QA_TESTS: ""
QA_SUPER_SIDEBAR_ENABLED: "true"
QA_ALLURE_RESULTS_DIRECTORY: tmp/allure-results-super-sidebar
QA_EXPORT_TEST_METRICS: "false"
QA_DISABLE_RSPEC_RETRY: "true"
GITLAB_QA_OPTS: --set-feature-flags super_sidebar_nav=enabled
RSPEC_REPORT_OPTS: "--format documentation"
SKIP_REPORT_IN_ISSUES: "true"
allow_failure: true
rules:
- if: $CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_PATH == "gitlab-org/gitlab" && $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
- !reference [.rules:test:manual, rules]
# ------------------------------------------
# FF changes
# ------------------------------------------
@ -304,7 +325,7 @@ decomposition-single-db-selective:
extends: .qa
variables:
QA_SCENARIO: Test::Instance::Image
GITLAB_QA_OPTS: --omnibus-config decomposition_single_db $EXTRA_GITLAB_QA_OPTS
GITLAB_QA_OPTS: --omnibus-config decomposition_single_db
rules:
- !reference [.rules:test:qa-selective, rules]
- if: $QA_SUITES =~ /Test::Instance::All/
@ -321,7 +342,7 @@ decomposition-multiple-db-selective:
variables:
QA_SCENARIO: Test::Instance::Image
GITLAB_ALLOW_SEPARATE_CI_DATABASE: "true"
GITLAB_QA_OPTS: --omnibus-config decomposition_multiple_db $EXTRA_GITLAB_QA_OPTS
GITLAB_QA_OPTS: --omnibus-config decomposition_multiple_db
rules:
- !reference [.rules:test:qa-selective, rules]
- if: $QA_SUITES =~ /Test::Instance::All/
@ -338,7 +359,7 @@ object-storage-selective:
variables:
QA_SCENARIO: Test::Instance::Image
QA_RSPEC_TAGS: --tag object_storage
GITLAB_QA_OPTS: --omnibus-config object_storage $EXTRA_GITLAB_QA_OPTS
GITLAB_QA_OPTS: --omnibus-config object_storage
rules:
- !reference [.rules:test:qa-selective, rules]
- if: $QA_SUITES =~ /Test::Instance::ObjectStorage/
@ -356,7 +377,7 @@ object-storage-aws-selective:
AWS_S3_BUCKET_NAME: $QA_AWS_S3_BUCKET_NAME
AWS_S3_KEY_ID: $QA_AWS_S3_KEY_ID
AWS_S3_REGION: $QA_AWS_S3_REGION
GITLAB_QA_OPTS: --omnibus-config object_storage_aws $EXTRA_GITLAB_QA_OPTS
GITLAB_QA_OPTS: --omnibus-config object_storage_aws
object-storage-aws:
extends: object-storage-aws-selective
parallel: 2
@ -370,7 +391,7 @@ object-storage-gcs-selective:
GOOGLE_PROJECT: $QA_GOOGLE_PROJECT
GOOGLE_JSON_KEY: $QA_GOOGLE_JSON_KEY
GOOGLE_CLIENT_EMAIL: $QA_GOOGLE_CLIENT_EMAIL
GITLAB_QA_OPTS: --omnibus-config object_storage_gcs $EXTRA_GITLAB_QA_OPTS
GITLAB_QA_OPTS: --omnibus-config object_storage_gcs
object-storage-gcs:
extends: object-storage-gcs-selective
parallel: 2
@ -382,7 +403,7 @@ packages-selective:
variables:
QA_SCENARIO: Test::Instance::Image
QA_RSPEC_TAGS: --tag packages
GITLAB_QA_OPTS: --omnibus-config packages $EXTRA_GITLAB_QA_OPTS
GITLAB_QA_OPTS: --omnibus-config packages
rules:
- !reference [.rules:test:qa-selective, rules]
- if: $QA_SUITES =~ /Test::Instance::Packages/
@ -629,7 +650,7 @@ registry-object-storage-tls:
QA_SCENARIO: Test::Integration::RegistryTLS
QA_RSPEC_TAGS: ""
GITLAB_TLS_CERTIFICATE: $QA_GITLAB_TLS_CERTIFICATE
GITLAB_QA_OPTS: --omnibus-config registry_object_storage $EXTRA_GITLAB_QA_OPTS
GITLAB_QA_OPTS: --omnibus-config registry_object_storage
importers:
extends: .qa
@ -650,10 +671,27 @@ e2e-test-report:
- .rules:report:allure-report
stage: report
variables:
ALLURE_JOB_NAME: e2e-package-and-test
GITLAB_AUTH_TOKEN: $PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE
ALLURE_PROJECT_PATH: $CI_PROJECT_PATH
ALLURE_MERGE_REQUEST_IID: $CI_MERGE_REQUEST_IID
# Temporary separate test report for super-sidebar test job
# TODO: remove once super-sidebar is on by default and enabled in tests
# https://gitlab.com/groups/gitlab-org/-/epics/9044
e2e-test-report-super-sidebar:
extends:
- .generate-allure-report-base
stage: report
needs:
- _super-sidebar-nav
variables:
ALLURE_JOB_NAME: e2e-super-sidebar
ALLURE_RESULTS_GLOB: gitlab-qa-run-*/**/allure-results-super-sidebar
rules:
- if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
- !reference [.rules:test:manual, rules]
upload-knapsack-report:
extends:
- .generate-knapsack-report-base

View File

@ -68,7 +68,6 @@ e2e:package-and-test-ee:
RELEASE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/build/omnibus-gitlab-mirror/gitlab-ee:${CI_COMMIT_SHA}"
GITLAB_QA_IMAGE: "${CI_REGISTRY_IMAGE}/gitlab-ee-qa:${CI_COMMIT_SHA}"
RUN_WITH_BUNDLE: "true" # instructs pipeline to install and run gitlab-qa gem via bundler
ALLURE_JOB_NAME: e2e-package-and-test
QA_PATH: qa # sets the optional path for bundler to run from
QA_RUN_TYPE: e2e-package-and-test
PIPELINE_NAME: E2E Omnibus GitLab EE
@ -107,20 +106,6 @@ e2e:package-and-test-ce:
GITLAB_QA_IMAGE: ${CI_REGISTRY_IMAGE}/gitlab-ce-qa:${CI_COMMIT_SHA}
PIPELINE_NAME: E2E Omnibus GitLab CE
e2e:package-and-test-super-sidebar:
extends: e2e:package-and-test-ee
variables:
QA_SUPER_SIDEBAR_ENABLED: "true"
QA_RUN_TYPE: e2e-package-and-test-super-sidebar
EXTRA_GITLAB_QA_OPTS: --set-feature-flags super_sidebar_nav=enabled
ALLURE_JOB_NAME: e2e-package-and-test-super-sidebar
PIPELINE_NAME: E2E Omnibus Super Sidebar
rules:
- if: $CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_PATH == "gitlab-org/gitlab" && $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
allow_failure: true
- when: manual
allow_failure: true
e2e:test-on-gdk:
extends:
- .qa:rules:e2e:test-on-gdk

View File

@ -127,7 +127,7 @@ export default {
data-testid="vsa-metrics-group"
>
<h4 class="gl-my-0">{{ group.title }}</h4>
<div class="gl-display-flex gl-flex-wrap-wrap">
<div class="gl-display-flex gl-flex-wrap">
<metric-tile
v-for="metric in group.data"
:key="metric.identifier"
@ -142,7 +142,7 @@ export default {
</div>
</div>
</div>
<div v-else class="gl-display-flex gl-flex-wrap-wrap gl-mb-7">
<div v-else class="gl-display-flex gl-flex-wrap gl-mb-7">
<metric-tile
v-for="metric in metrics"
:key="metric.identifier"

View File

@ -123,7 +123,7 @@ export default {
/>
</gl-form-group>
<div class="gl-display-flex gl-flex-wrap-wrap">
<div class="gl-display-flex gl-flex-wrap">
<gl-button
type="submit"
class="gl-sm-mr-3 gl-w-full gl-sm-w-auto"

View File

@ -127,7 +127,7 @@ export default {
</li>
</ul>
</gl-card>
<div class="gl-my-n2 gl-mx-n2 gl-display-flex gl-flex-wrap-wrap">
<div class="gl-my-n2 gl-mx-n2 gl-display-flex gl-flex-wrap">
<div class="gl-p-2">
<clipboard-button
:title="$options.i18n.copyButton"

View File

@ -97,7 +97,7 @@ export default {
</blob-filepath>
</div>
<div class="gl-display-flex gl-flex-wrap-wrap file-actions">
<div class="gl-display-flex gl-flex-wrap file-actions">
<viewer-switcher v-if="showViewerSwitcher" v-model="viewer" :doc-icon="blobSwitcherDocIcon" />
<slot name="actions"></slot>

View File

@ -251,7 +251,7 @@ export default {
</h4>
<slot></slot>
</div>
<div v-if="showLabelFooter" class="board-card-labels gl-mt-2 gl-display-flex gl-flex-wrap-wrap">
<div v-if="showLabelFooter" class="board-card-labels gl-mt-2 gl-display-flex gl-flex-wrap">
<template v-for="label in orderedLabels">
<gl-label
:key="label.id"

View File

@ -173,9 +173,7 @@ export default {
</form>
<template #modal-footer>
<div
class="gl-display-flex gl-flex-direction-row gl-justify-content-end gl-flex-wrap-wrap gl-m-0"
>
<div class="gl-display-flex gl-flex-direction-row gl-justify-content-end gl-flex-wrap gl-m-0">
<gl-button data-testid="delete-branch-cancel-button" @click="closeModal">
{{ $options.i18n.cancelButtonText }}
</gl-button>

View File

@ -150,7 +150,7 @@ export default {
<template #modal-footer>
<div
class="gl-display-flex gl-flex-direction-row gl-justify-content-end gl-flex-wrap-wrap gl-m-0 gl-mr-3"
class="gl-display-flex gl-flex-direction-row gl-justify-content-end gl-flex-wrap gl-m-0 gl-mr-3"
>
<gl-button data-testid="delete-merged-branches-cancel-button" @click="closeModal">
{{ $options.i18n.cancelButtonText }}

View File

@ -1,143 +1,41 @@
<script>
import { GlButton, GlModal, GlSprintf } from '@gitlab/ui';
import { createAlert } from '~/alert';
import { TYPENAME_PROJECT } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import getJobArtifactsQuery from '../graphql/queries/get_job_artifacts.query.graphql';
import bulkDestroyJobArtifactsMutation from '../graphql/mutations/bulk_destroy_job_artifacts.mutation.graphql';
import { removeArtifactFromStore } from '../graphql/cache_update';
import { GlButton, GlSprintf } from '@gitlab/ui';
import {
I18N_BULK_DELETE_BANNER,
I18N_BULK_DELETE_CLEAR_SELECTION,
I18N_BULK_DELETE_DELETE_SELECTED,
I18N_BULK_DELETE_MODAL_TITLE,
I18N_BULK_DELETE_BODY,
I18N_BULK_DELETE_ACTION,
I18N_BULK_DELETE_PARTIAL_ERROR,
I18N_BULK_DELETE_ERROR,
I18N_MODAL_CANCEL,
BULK_DELETE_MODAL_ID,
} from '../constants';
export default {
name: 'ArtifactsBulkDelete',
components: {
GlButton,
GlModal,
GlSprintf,
},
inject: ['projectId'],
props: {
selectedArtifacts: {
type: Array,
required: true,
},
queryVariables: {
type: Object,
required: true,
},
},
data() {
return {
isModalVisible: false,
isDeleting: false,
};
},
computed: {
checkedCount() {
return this.selectedArtifacts.length || 0;
},
modalActionPrimary() {
return {
text: I18N_BULK_DELETE_ACTION(this.checkedCount),
attributes: {
loading: this.isDeleting,
variant: 'danger',
},
};
},
modalActionCancel() {
return {
text: I18N_MODAL_CANCEL,
attributes: {
loading: this.isDeleting,
},
};
},
},
methods: {
async onConfirmDelete(e) {
// don't close modal until deletion is complete
if (e) {
e.preventDefault();
}
this.isDeleting = true;
try {
await this.$apollo.mutate({
mutation: bulkDestroyJobArtifactsMutation,
variables: {
projectId: convertToGraphQLId(TYPENAME_PROJECT, this.projectId),
ids: this.selectedArtifacts,
},
update: (store, { data }) => {
const { errors, destroyedCount, destroyedIds } = data.bulkDestroyJobArtifacts;
if (errors?.length) {
createAlert({
message: I18N_BULK_DELETE_PARTIAL_ERROR,
captureError: true,
error: new Error(errors.join(' ')),
});
}
if (destroyedIds?.length) {
this.$emit('deleted', destroyedCount);
// Remove deleted artifacts from the cache
destroyedIds.forEach((id) => {
removeArtifactFromStore(store, id, getJobArtifactsQuery, this.queryVariables);
});
store.gc();
this.$emit('clearSelectedArtifacts');
}
},
});
} catch (error) {
this.onError(error);
} finally {
this.isDeleting = false;
this.isModalVisible = false;
}
},
onError(error) {
createAlert({
message: I18N_BULK_DELETE_ERROR,
captureError: true,
error,
});
},
handleClearSelection() {
this.$emit('clearSelectedArtifacts');
},
handleModalShow() {
this.isModalVisible = true;
},
handleModalHide() {
this.isModalVisible = false;
},
},
i18n: {
banner: I18N_BULK_DELETE_BANNER,
clearSelection: I18N_BULK_DELETE_CLEAR_SELECTION,
deleteSelected: I18N_BULK_DELETE_DELETE_SELECTED,
modalTitle: I18N_BULK_DELETE_MODAL_TITLE,
modalBody: I18N_BULK_DELETE_BODY,
},
BULK_DELETE_MODAL_ID,
};
</script>
<template>
<div class="gl-my-4 gl-p-4 gl-border-1 gl-border-solid gl-border-gray-100">
<div
v-if="selectedArtifacts.length > 0"
class="gl-my-4 gl-p-4 gl-border-1 gl-border-solid gl-border-gray-100"
data-testid="bulk-delete-container"
>
<div class="gl-display-flex gl-align-items-center">
<div>
<gl-sprintf :message="$options.i18n.banner(checkedCount)">
@ -150,33 +48,18 @@ export default {
<gl-button
variant="default"
data-testid="bulk-delete-clear-button"
@click="handleClearSelection"
@click="$emit('clearSelectedArtifacts')"
>
{{ $options.i18n.clearSelection }}
</gl-button>
<gl-button
variant="danger"
data-testid="bulk-delete-delete-button"
@click="handleModalShow"
@click="$emit('showBulkDeleteModal')"
>
{{ $options.i18n.deleteSelected }}
</gl-button>
</div>
</div>
<gl-modal
size="sm"
:modal-id="$options.BULK_DELETE_MODAL_ID"
:visible="isModalVisible"
:title="$options.i18n.modalTitle(checkedCount)"
:action-primary="modalActionPrimary"
:action-cancel="modalActionCancel"
@hide="handleModalHide"
@primary="onConfirmDelete"
>
<gl-sprintf
data-testid="bulk-delete-modal-content"
:message="$options.i18n.modalBody(checkedCount)"
/>
</gl-modal>
</div>
</template>

View File

@ -0,0 +1,73 @@
<script>
import { GlModal, GlSprintf } from '@gitlab/ui';
import {
I18N_BULK_DELETE_MODAL_TITLE,
I18N_BULK_DELETE_BODY,
I18N_BULK_DELETE_ACTION,
I18N_MODAL_CANCEL,
BULK_DELETE_MODAL_ID,
} from '../constants';
export default {
name: 'BulkDeleteModal',
components: {
GlModal,
GlSprintf,
},
props: {
visible: {
type: Boolean,
required: true,
},
artifactsToDelete: {
type: Array,
required: true,
},
isDeleting: {
type: Boolean,
required: true,
},
},
computed: {
checkedCount() {
return this.artifactsToDelete.length || 0;
},
modalActionPrimary() {
return {
text: I18N_BULK_DELETE_ACTION(this.checkedCount),
attributes: {
loading: this.isDeleting,
variant: 'danger',
},
};
},
modalActionCancel() {
return {
text: I18N_MODAL_CANCEL,
attributes: {
disabled: this.isDeleting,
},
};
},
},
BULK_DELETE_MODAL_ID,
i18n: {
modalTitle: I18N_BULK_DELETE_MODAL_TITLE,
modalBody: I18N_BULK_DELETE_BODY,
},
};
</script>
<template>
<gl-modal
size="sm"
:modal-id="$options.BULK_DELETE_MODAL_ID"
:visible="visible"
:title="$options.i18n.modalTitle(checkedCount)"
:action-primary="modalActionPrimary"
:action-cancel="modalActionCancel"
v-bind="$attrs"
v-on="$listeners"
>
<gl-sprintf :message="$options.i18n.modalBody(checkedCount)" />
</gl-modal>
</template>
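
Note on the action props: GlModal's `action-primary` and `action-cancel` expect `{ text, attributes }` objects, which is exactly what the two computed properties above build; binding `loading`/`disabled` to `isDeleting` is what disables the buttons mid-request. A reduced sketch (the builder functions are illustrative, not part of the component):

```javascript
// Illustrative builders for the { text, attributes } shape consumed by
// GlModal's action-primary / action-cancel props (see the computeds above).
const modalActionPrimary = (count, isDeleting) => ({
  text: `Delete ${count} artifacts`, // the real text comes from I18N_BULK_DELETE_ACTION
  attributes: { loading: isDeleting, variant: 'danger' },
});

const modalActionCancel = (isDeleting) => ({
  text: 'Cancel', // I18N_MODAL_CANCEL in the real code
  attributes: { disabled: isDeleting },
});
```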

View File

@ -11,12 +11,15 @@ import {
GlFormCheckbox,
} from '@gitlab/ui';
import { createAlert } from '~/alert';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { getIdFromGraphQLId, convertToGraphQLId } from '~/graphql_shared/utils';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { TYPENAME_PROJECT } from '~/graphql_shared/constants';
import getJobArtifactsQuery from '../graphql/queries/get_job_artifacts.query.graphql';
import { totalArtifactsSizeForJob, mapArchivesToJobNodes, mapBooleansToJobNodes } from '../utils';
import bulkDestroyJobArtifactsMutation from '../graphql/mutations/bulk_destroy_job_artifacts.mutation.graphql';
import { removeArtifactFromStore } from '../graphql/cache_update';
import {
STATUS_BADGE_VARIANTS,
I18N_DOWNLOAD,
@ -36,10 +39,13 @@ import {
JOBS_PER_PAGE,
INITIAL_LAST_PAGE_SIZE,
BULK_DELETE_FEATURE_FLAG,
I18N_BULK_DELETE_ERROR,
I18N_BULK_DELETE_PARTIAL_ERROR,
I18N_BULK_DELETE_CONFIRMATION_TOAST,
} from '../constants';
import JobCheckbox from './job_checkbox.vue';
import ArtifactsBulkDelete from './artifacts_bulk_delete.vue';
import BulkDeleteModal from './bulk_delete_modal.vue';
import ArtifactsTableRowDetails from './artifacts_table_row_details.vue';
import FeedbackBanner from './feedback_banner.vue';
@ -67,11 +73,12 @@ export default {
TimeAgo,
JobCheckbox,
ArtifactsBulkDelete,
BulkDeleteModal,
ArtifactsTableRowDetails,
FeedbackBanner,
},
mixins: [glFeatureFlagsMixin()],
inject: ['projectPath', 'canDestroyArtifacts'],
inject: ['projectId', 'projectPath', 'canDestroyArtifacts'],
apollo: {
jobArtifacts: {
query: getJobArtifactsQuery,
@ -106,6 +113,9 @@ export default {
expandedJobs: [],
selectedArtifacts: [],
pagination: INITIAL_PAGINATION_STATE,
isBulkDeleteModalVisible: false,
jobArtifactsToDelete: [],
isBulkDeleting: false,
};
},
computed: {
@ -144,6 +154,12 @@ export default {
canBulkDestroyArtifacts() {
return this.glFeatures[BULK_DELETE_FEATURE_FLAG] && this.canDestroyArtifacts;
},
isDeletingArtifactsForJob() {
return this.jobArtifactsToDelete.length > 0;
},
artifactsToDelete() {
return this.isDeletingArtifactsForJob ? this.jobArtifactsToDelete : this.selectedArtifacts;
},
},
methods: {
refetchArtifacts() {
@ -191,12 +207,70 @@ export default {
this.selectedArtifacts.splice(this.selectedArtifacts.indexOf(artifactNode.id), 1);
}
},
onConfirmBulkDelete(e) {
// don't close modal until deletion is complete
if (e) {
e.preventDefault();
}
this.isBulkDeleting = true;
this.$apollo
.mutate({
mutation: bulkDestroyJobArtifactsMutation,
variables: {
projectId: convertToGraphQLId(TYPENAME_PROJECT, this.projectId),
ids: this.artifactsToDelete,
},
update: (store, { data }) => {
const { errors, destroyedCount, destroyedIds } = data.bulkDestroyJobArtifacts;
if (errors?.length) {
createAlert({
message: I18N_BULK_DELETE_PARTIAL_ERROR,
captureError: true,
error: new Error(errors.join(' ')),
});
}
if (destroyedIds?.length) {
this.$toast.show(I18N_BULK_DELETE_CONFIRMATION_TOAST(destroyedCount));
// Remove deleted artifacts from the cache
destroyedIds.forEach((id) => {
removeArtifactFromStore(store, id, getJobArtifactsQuery, this.queryVariables);
});
store.gc();
if (!this.isDeletingArtifactsForJob) {
this.clearSelectedArtifacts();
}
}
},
})
.catch((error) => {
this.onError(error);
})
.finally(() => {
this.isBulkDeleting = false;
this.isBulkDeleteModalVisible = false;
this.jobArtifactsToDelete = [];
});
},
onError(error) {
createAlert({
message: I18N_BULK_DELETE_ERROR,
captureError: true,
error,
});
},
handleBulkDeleteModalShow() {
this.isBulkDeleteModalVisible = true;
},
handleBulkDeleteModalHidden() {
this.isBulkDeleteModalVisible = false;
this.jobArtifactsToDelete = [];
},
clearSelectedArtifacts() {
this.selectedArtifacts = [];
},
showDeletedToast(deletedCount) {
this.$toast.show(I18N_BULK_DELETE_CONFIRMATION_TOAST(deletedCount));
},
downloadPath(job) {
return job.archive?.downloadPath;
},
@ -206,6 +280,13 @@ export default {
browseButtonDisabled(job) {
return !job.browseArtifactsPath;
},
deleteButtonDisabled(job) {
return !job.hasArtifacts || !this.canBulkDestroyArtifacts;
},
deleteArtifactsForJob(job) {
this.jobArtifactsToDelete = job.artifacts.nodes.map((node) => node.id);
this.handleBulkDeleteModalShow();
},
},
fields: [
{
@ -257,11 +338,17 @@ export default {
<div>
<feedback-banner />
<artifacts-bulk-delete
v-if="canBulkDestroyArtifacts && anyArtifactsSelected"
v-if="canBulkDestroyArtifacts"
:selected-artifacts="selectedArtifacts"
:query-variables="queryVariables"
@clearSelectedArtifacts="clearSelectedArtifacts"
@deleted="showDeletedToast"
@showBulkDeleteModal="handleBulkDeleteModalShow"
/>
<bulk-delete-modal
:visible="isBulkDeleteModalVisible"
:artifacts-to-delete="artifactsToDelete"
:is-deleting="isBulkDeleting"
@primary="onConfirmBulkDelete"
@hidden="handleBulkDeleteModalHidden"
/>
<gl-table
:items="jobArtifacts"
@ -382,10 +469,11 @@ export default {
<gl-button
v-if="canDestroyArtifacts"
icon="remove"
:disabled="deleteButtonDisabled(item)"
:title="$options.i18n.delete"
:aria-label="$options.i18n.delete"
data-testid="job-artifacts-delete-button"
disabled
@click="deleteArtifactsForJob(item)"
/>
</gl-button-group>
</template>
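
The `update` callback in `onConfirmBulkDelete` removes each destroyed artifact from the cached `getJobArtifactsQuery` result via `removeArtifactFromStore`, then calls `store.gc()`. The same housekeeping can be expressed directly against Apollo's cache API; a minimal sketch (the `CiJobArtifact` typename is an assumption for illustration, and the real helper lives in `../graphql/cache_update`):

```javascript
import { InMemoryCache } from '@apollo/client/core';

const cache = new InMemoryCache();

// Drop each destroyed node from the normalized cache, then garbage-collect
// whatever is left unreachable - roughly what the update callback above does.
function evictDestroyedArtifacts(destroyedIds) {
  destroyedIds.forEach((id) => {
    cache.evict({ id: cache.identify({ __typename: 'CiJobArtifact', id }) });
  });
  cache.gc();
}
```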

View File

@ -461,7 +461,7 @@ export default {
@dismiss="dismissTip"
>
<div
class="gl-display-flex gl-flex-direction-row gl-flex-wrap-wrap gl-md-flex-wrap-nowrap gl-gap-3"
class="gl-display-flex gl-flex-direction-row gl-flex-wrap gl-md-flex-wrap-nowrap gl-gap-3"
>
<div>
<p>

View File

@ -52,7 +52,7 @@ export default {
};
</script>
<template>
<div class="gl-mb-4 gl-display-flex gl-flex-wrap-wrap gl-gap-3">
<div class="gl-mb-4 gl-display-flex gl-flex-wrap gl-gap-3">
<gl-button
v-if="showFileTreeToggle"
id="file-tree-toggle"

View File

@ -134,9 +134,7 @@ export default {
</script>
<template>
<div
class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-flex-wrap-wrap"
>
<div class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-flex-wrap">
<template v-if="showLoadingState">
<div>
<gl-loading-icon class="gl-mr-auto gl-display-inline-block" size="sm" />
@ -173,7 +171,7 @@ export default {
</gl-sprintf>
</span>
</div>
<div class="gl-display-flex gl-flex-wrap-wrap">
<div class="gl-display-flex gl-flex-wrap">
<pipeline-editor-mini-graph :pipeline="pipeline" v-on="$listeners" />
<gl-button
class="gl-ml-3"

View File

@ -38,9 +38,9 @@ export default {
</script>
<template>
<div
class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-gap-3 gl-flex-wrap-wrap gl-py-5 gl-border-b-1 gl-border-b-solid gl-border-b-gray-100"
class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-gap-3 gl-flex-wrap gl-py-5 gl-border-b-1 gl-border-b-solid gl-border-b-gray-100"
>
<div class="gl-display-flex gl-align-items-flex-start gl-gap-3 gl-flex-wrap-wrap">
<div class="gl-display-flex gl-align-items-flex-start gl-gap-3 gl-flex-wrap">
<runner-status-badge :runner="runner" />
<runner-type-badge v-if="runner" :type="runner.runnerType" />
<span>
@ -65,6 +65,6 @@ export default {
</template>
</span>
</div>
<div class="gl-display-flex gl-gap-3 gl-flex-wrap-wrap"><slot name="actions"></slot></div>
<div class="gl-display-flex gl-gap-3 gl-flex-wrap"><slot name="actions"></slot></div>
</div>
</template>

View File

@ -57,7 +57,7 @@ export default {
<div class="gl-mt-3 gl-mb-6">
<label>{{ s__('Runners|Operating systems') }}</label>
<div class="gl-display-flex gl-flex-wrap-wrap gl-gap-5">
<div class="gl-display-flex gl-flex-wrap gl-gap-5">
<!-- eslint-disable @gitlab/vue-require-i18n-strings -->
<runner-platforms-radio v-model="model" :value="$options.LINUX_PLATFORM">
Linux
@ -74,7 +74,7 @@ export default {
<div class="gl-mt-3 gl-mb-6">
<label>{{ s__('Runners|Cloud templates') }}</label>
<!-- eslint-disable @gitlab/vue-require-i18n-strings -->
<div class="gl-display-flex gl-flex-wrap-wrap gl-gap-5">
<div class="gl-display-flex gl-flex-wrap gl-gap-5">
<runner-platforms-radio
v-model="model"
:image="$options.AWS_LOGO_URL"
@ -88,7 +88,7 @@ export default {
<div class="gl-mt-3 gl-mb-6">
<label>{{ s__('Runners|Containers') }}</label>
<div class="gl-display-flex gl-flex-wrap-wrap gl-gap-5">
<div class="gl-display-flex gl-flex-wrap gl-gap-5">
<!-- eslint-disable @gitlab/vue-require-i18n-strings -->
<runner-platforms-radio :image="$options.DOCKER_LOGO_URL">
<gl-link :href="$options.DOCKER_HELP_URL" target="_blank">

View File

@ -83,7 +83,7 @@ export default {
</script>
<template>
<runner-count #default="{ count }" :scope="scope" :variables="variables">
<div v-if="count" class="gl-display-flex gl-flex-wrap-wrap gl-py-6">
<div v-if="count" class="gl-display-flex gl-flex-wrap gl-py-6">
<runner-single-stat
v-for="stat in stats"
:key="stat.key"

View File

@ -26,7 +26,7 @@ export default {
class="gl-w-full gl-border-b gl-display-flex gl-justify-content-end"
data-testid="formatting-toolbar"
>
<div class="gl-py-2 gl-display-flex gl-flex-wrap-wrap gl-align-items-end">
<div class="gl-py-2 gl-display-flex gl-flex-wrap gl-align-items-end">
<toolbar-text-style-dropdown
data-testid="text-styles"
@execute="trackToolbarControlExecution"

View File

@ -11,6 +11,11 @@ export const KEYBOARD_SHORTCUT_TRACKING_ACTION = 'execute_keyboard_shortcut';
export const INPUT_RULE_TRACKING_ACTION = 'execute_input_rule';
export const TEXT_STYLE_DROPDOWN_ITEMS = [
{
contentType: 'paragraph',
editorCommand: 'setParagraph',
label: __('Normal text'),
},
{
contentType: 'heading',
commandParams: { level: 1 },
@ -35,11 +40,6 @@ export const TEXT_STYLE_DROPDOWN_ITEMS = [
commandParams: { level: 4 },
label: __('Heading 4'),
},
{
contentType: 'paragraph',
editorCommand: 'setParagraph',
label: __('Normal text'),
},
];
export const ALERT_EVENT = 'alert';

View File

@ -368,7 +368,7 @@ export default {
data-testid="design-toolbar-wrapper"
>
<div
class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-w-full gl-flex-wrap-wrap gl-gap-3"
class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-w-full gl-flex-wrap gl-gap-3"
>
<div class="gl-display-flex gl-align-items-center">
<span class="gl-font-weight-bold gl-mr-3">{{ s__('DesignManagement|Designs') }}</span>

View File

@ -24,7 +24,7 @@ export default {
<!-- eslint-disable-next-line vue/no-deprecated-functional-template -->
<template functional>
<div class="gl-display-flex gl-flex-wrap-wrap gl-mb-2">
<div class="gl-display-flex gl-flex-wrap gl-mb-2">
<template v-if="props.renderGroup">
<button
v-for="emoji in props.emojis"

View File

@ -186,7 +186,7 @@ export default {
<span v-if="appliesToAllEnvironments" class="text-secondary gl-mt-3 mt-md-0 ml-md-3">
{{ $options.i18n.allEnvironments }}
</span>
<div v-else class="gl-display-flex gl-align-items-center gl-flex-wrap-wrap">
<div v-else class="gl-display-flex gl-align-items-center gl-flex-wrap">
<gl-token
v-for="environment in filteredEnvironments"
:key="environment.id"

View File

@ -53,7 +53,7 @@ export default {
<template>
<div v-if="canCreateSubgroups || canCreateProjects" class="gl-mt-5">
<div class="gl-display-flex gl-mx-n3 gl-my-n3 gl-flex-wrap-wrap">
<div class="gl-display-flex gl-mx-n3 gl-my-n3 gl-flex-wrap">
<div v-if="canCreateSubgroups" class="gl-p-3 gl-w-full gl-sm-w-half">
<gl-link :href="newSubgroupPath" :class="$options.linkClasses">
<div class="svg-content gl-w-15 gl-flex-shrink-0 gl-mr-5">

View File

@ -190,7 +190,7 @@ export default {
<div class="group-text-container d-flex flex-fill align-items-center">
<div class="group-text flex-grow-1 flex-shrink-1">
<div
class="gl-display-flex gl-align-items-center gl-flex-wrap-wrap title namespace-title gl-font-weight-bold gl-mr-3"
class="gl-display-flex gl-align-items-center gl-flex-wrap title namespace-title gl-font-weight-bold gl-mr-3"
>
<a
v-gl-tooltip.bottom
@ -259,7 +259,7 @@ export default {
<gl-badge variant="warning">{{ __('pending deletion') }}</gl-badge>
</div>
<div
class="metadata gl-display-flex gl-flex-grow-1 gl-flex-shrink-0 gl-flex-wrap-wrap justify-content-md-between"
class="metadata gl-display-flex gl-flex-grow-1 gl-flex-shrink-0 gl-flex-wrap justify-content-md-between"
>
<item-stats
:item="group"

View File

@ -135,7 +135,7 @@ export default {
<template v-if="hasIncompatibleRepos">
<slot name="incompatible-repos-warning"></slot>
</template>
<div class="gl-display-flex gl-justify-content-space-between gl-flex-wrap-wrap gl-mb-5">
<div class="gl-display-flex gl-justify-content-space-between gl-flex-wrap gl-mb-5">
<gl-button
variant="confirm"
:loading="isImportingAnyRepo"

View File

@ -14,3 +14,11 @@ export default () =>
mutations,
state: createState(),
});
export const createRefModule = () => ({
namespaced: true,
actions,
getters,
mutations,
state: createState(),
});
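
Exporting a factory here (alongside the default store) means each registration gets a fresh `state` object, so two apps on one page can't share or clobber search results. A minimal sketch of the distinction (module shapes are illustrative):

```javascript
// Singleton state: every consumer registering this module shares one object.
const sharedState = { query: '' };
const singletonModule = { namespaced: true, state: sharedState };

// Factory: each call produces independent state, as createRefModule does.
const createModule = () => ({ namespaced: true, state: { query: '' } });
```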

View File

@ -0,0 +1,91 @@
<script>
import { GlButton, GlFormGroup, GlFormInput, GlFormTextarea } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';
import { uniqueId } from 'lodash';
import { __, s__ } from '~/locale';
import RefSelector from '~/ref/components/ref_selector.vue';
export default {
components: {
GlButton,
GlFormGroup,
GlFormInput,
GlFormTextarea,
RefSelector,
},
model: {
prop: 'value',
event: 'change',
},
props: {
value: { type: String, required: true },
},
data() {
return {
nameId: uniqueId('tag-name-'),
refId: uniqueId('ref-'),
messageId: uniqueId('message-'),
};
},
computed: {
...mapState('editNew', ['projectId', 'release', 'createFrom']),
},
methods: {
...mapActions('editNew', ['updateReleaseTagMessage', 'updateCreateFrom']),
},
i18n: {
tagNameLabel: __('Tag name'),
refLabel: __('Create from'),
messageLabel: s__('CreateGitTag|Set tag message'),
messagePlaceholder: s__(
'CreateGitTag|Add a message to the tag. Leaving this blank creates a lightweight tag.',
),
create: __('Save'),
cancel: s__('Release|Select another tag'),
refSelector: {
noRefSelected: __('No source selected'),
searchPlaceholder: __('Search branches, tags, and commits'),
dropdownHeader: __('Select source'),
},
},
};
</script>
<template>
<div class="gl-p-3" data-testid="create-from-field">
<gl-form-group
class="gl-mb-3"
:label="$options.i18n.tagNameLabel"
:label-for="nameId"
label-sr-only
>
<gl-form-input :id="nameId" :value="value" autofocus @input="$emit('change', $event)" />
</gl-form-group>
<gl-form-group class="gl-mb-3" :label="$options.i18n.refLabel" :label-for="refId" label-sr-only>
<ref-selector
:id="refId"
:project-id="projectId"
:value="createFrom"
:translations="$options.i18n.refSelector"
@input="updateCreateFrom"
/>
</gl-form-group>
<gl-form-group
class="gl-mb-3"
:label="$options.i18n.messageLabel"
:label-for="messageId"
label-sr-only
>
<gl-form-textarea
:id="messageId"
:placeholder="$options.i18n.messagePlaceholder"
:no-resize="false"
:value="release.tagMessage"
@input="updateReleaseTagMessage"
/>
</gl-form-group>
<gl-button class="gl-mr-3" variant="confirm" @click="$emit('create')">
{{ $options.i18n.create }}
</gl-button>
<gl-button @click="$emit('cancel')">{{ $options.i18n.cancel }}</gl-button>
</div>
</template>
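
The ids generated with lodash `uniqueId` keep each `label-for`/`:id` pair unambiguous even if several of these forms mount on the same page; for example:

```javascript
import { uniqueId } from 'lodash';

// uniqueId appends a global counter to the prefix, so repeated calls
// (and repeated component instances) never collide.
uniqueId('tag-name-'); // "tag-name-1"
uniqueId('tag-name-'); // "tag-name-2"
uniqueId('message-'); // "message-3"
```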

View File

@ -1,225 +1,133 @@
<script>
import {
GlCollapse,
GlLink,
GlFormGroup,
GlFormTextarea,
GlDropdownItem,
GlSprintf,
} from '@gitlab/ui';
import { uniqueId } from 'lodash';
import { GlDropdown, GlFormGroup, GlPopover } from '@gitlab/ui';
import { mapState, mapActions, mapGetters } from 'vuex';
import { __, s__ } from '~/locale';
import RefSelector from '~/ref/components/ref_selector.vue';
import { REF_TYPE_TAGS } from '~/ref/constants';
import FormFieldContainer from './form_field_container.vue';
import TagSearch from './tag_search.vue';
import TagCreate from './tag_create.vue';
export default {
name: 'TagFieldNew',
components: {
GlCollapse,
GlDropdown,
GlFormGroup,
GlFormTextarea,
GlLink,
RefSelector,
FormFieldContainer,
GlDropdownItem,
GlSprintf,
GlPopover,
TagSearch,
TagCreate,
},
data() {
return {
// Keeps track of whether or not the user has interacted with
// the input field. This is used to avoid showing validation
// errors immediately when the page loads.
isInputDirty: false,
};
return { id: 'release-tag-name', newTagName: '', show: false, isInputDirty: false };
},
computed: {
...mapState('editNew', ['projectId', 'release', 'createFrom', 'showCreateFrom']),
...mapGetters('editNew', ['validationErrors']),
tagName: {
get() {
return this.release.tagName;
},
set(tagName) {
this.updateReleaseTagName(tagName);
// This setter is used by the `v-model` on the `RefSelector`.
// When this is called, the selection originated from the
// dropdown list of existing tag names, so we know the tag
// already exists and don't need to show the "create from" input
this.updateShowCreateFrom(false);
},
},
tagMessage: {
get() {
return this.release.tagMessage;
},
set(tagMessage) {
this.updateReleaseTagMessage(tagMessage);
},
},
createFromModel: {
get() {
return this.createFrom;
},
set(createFrom) {
this.updateCreateFrom(createFrom);
},
...mapState('editNew', ['release', 'showCreateFrom']),
...mapGetters('editNew', ['validationErrors', 'isSearching', 'isCreating']),
title() {
return this.isCreating ? this.$options.i18n.createTitle : this.$options.i18n.selectTitle;
},
showTagNameValidationError() {
return this.isInputDirty && !this.validationErrors.tagNameValidation.isValid;
},
tagNameInputId() {
return uniqueId('tag-name-input-');
},
createFromSelectorId() {
return uniqueId('create-from-selector-');
},
tagFeedback() {
return this.validationErrors.tagNameValidation.validationErrors[0];
},
buttonText() {
return this.release?.tagName || s__('Release|Search or create tag name');
},
buttonVariant() {
return this.showTagNameValidationError ? 'danger' : 'default';
},
createText() {
return this.newTagName ? this.$options.i18n.createTag : this.$options.i18n.typeNew;
},
},
methods: {
...mapActions('editNew', [
'setSearching',
'setCreating',
'setNewTag',
'setExistingTag',
'updateReleaseTagName',
'updateReleaseTagMessage',
'updateCreateFrom',
'fetchTagNotes',
'updateShowCreateFrom',
]),
startCreate(query) {
this.newTagName = query;
this.setCreating();
},
selected(tag) {
this.updateReleaseTagName(tag);
if (this.isSearching) {
this.fetchTagNotes(tag);
this.setExistingTag();
this.newTagName = '';
} else {
this.setNewTag();
}
this.hidePopover();
},
markInputAsDirty() {
this.isInputDirty = true;
},
createTagClicked(newTagName) {
this.updateReleaseTagName(newTagName);
// This method is called when the user selects the "create tag"
// option, so the tag does not already exist. Because of this,
// we need to show the "create from" input.
this.updateShowCreateFrom(true);
showPopover() {
this.show = true;
},
shouldShowCreateTagOption(isLoading, matches, query) {
// Show the "create tag" option if:
return (
// we're not currently loading any results, and
!isLoading &&
// the search query isn't just whitespace, and
query.trim() &&
// the `matches` object is non-null, and
matches &&
// the tag name doesn't already exist
!matches.tags.list.some(
(tagInfo) => tagInfo.name.toUpperCase() === query.toUpperCase().trim(),
)
);
hidePopover() {
this.show = false;
},
},
translations: {
tagName: {
noRefSelected: __('No tag selected'),
dropdownHeader: __('Tag name'),
searchPlaceholder: __('Search or create tag'),
label: __('Tag name'),
labelDescription: __('*Required'),
},
createFrom: {
noRefSelected: __('No source selected'),
searchPlaceholder: __('Search branches, tags, and commits'),
dropdownHeader: __('Select source'),
label: __('Create from'),
description: __('Existing branch name, tag, or commit SHA'),
},
annotatedTag: {
label: s__('CreateGitTag|Set tag message'),
description: s__(
'CreateGitTag|Add a message to the tag. Leaving this blank creates a %{linkStart}lightweight tag%{linkEnd}.',
),
},
i18n: {
selectTitle: __('Tags'),
createTitle: s__('Release|Create tag'),
label: __('Tag name'),
required: __('(required)'),
create: __('Create'),
cancel: __('Cancel'),
},
tagMessageId: uniqueId('tag-message-'),
tagNameEnabledRefTypes: [REF_TYPE_TAGS],
gitTagDocsLink: 'https://git-scm.com/book/en/v2/Git-Basics-Tagging/',
};
</script>
<template>
<div>
<div class="row">
<gl-form-group
data-testid="tag-name-field"
class="col-md-4 col-sm-10"
:label="$options.i18n.label"
:label-for="id"
:optional-text="$options.i18n.required"
:state="!showTagNameValidationError"
:invalid-feedback="tagFeedback"
:label="$options.translations.tagName.label"
:label-for="tagNameInputId"
:label-description="$options.translations.tagName.labelDescription"
optional
data-testid="tag-name-field"
>
<form-field-container>
<ref-selector
:id="tagNameInputId"
v-model="tagName"
:project-id="projectId"
:translations="$options.translations.tagName"
:enabled-ref-types="$options.tagNameEnabledRefTypes"
:state="!showTagNameValidationError"
@input="fetchTagNotes"
@hide.once="markInputAsDirty"
>
<template #footer="{ isLoading, matches, query }">
<gl-dropdown-item
v-if="shouldShowCreateTagOption(isLoading, matches, query)"
is-check-item
:is-checked="tagName === query"
@click="createTagClicked(query)"
>
<gl-sprintf :message="__('Create tag %{tagName}')">
<template #tagName>
<b>{{ query }}</b>
</template>
</gl-sprintf>
</gl-dropdown-item>
</template>
</ref-selector>
</form-field-container>
<gl-dropdown
:id="id"
:variant="buttonVariant"
:text="buttonText"
:toggle-class="['gl-text-gray-900!']"
category="secondary"
class="gl-w-30"
@show.prevent="showPopover"
/>
<gl-popover
:show="show"
:target="id"
:title="title"
:css-classes="['gl-z-index-200', 'release-tag-selector']"
placement="bottom"
triggers="manual"
container="content-body"
show-close-button
@close-button-clicked="hidePopover"
@hide.once="markInputAsDirty"
>
<div class="gl-border-t-solid gl-border-t-1 gl-border-gray-200">
<tag-create
v-if="isCreating"
v-model="newTagName"
@create="selected(newTagName)"
@cancel="setSearching"
/>
<tag-search v-else v-model="newTagName" @create="startCreate" @select="selected" />
</div>
</gl-popover>
</gl-form-group>
<gl-collapse :visible="showCreateFrom">
<div class="gl-pl-6 gl-border-l-1 gl-border-l-solid gl-border-gray-300">
<gl-form-group
v-if="showCreateFrom"
:label="$options.translations.createFrom.label"
:label-for="createFromSelectorId"
data-testid="create-from-field"
>
<form-field-container>
<ref-selector
:id="createFromSelectorId"
v-model="createFromModel"
:project-id="projectId"
:translations="$options.translations.createFrom"
/>
</form-field-container>
<template #description>{{ $options.translations.createFrom.description }}</template>
</gl-form-group>
<gl-form-group
v-if="showCreateFrom"
:label="$options.translations.annotatedTag.label"
:label-for="$options.tagMessageId"
data-testid="annotated-tag-message-field"
>
<gl-form-textarea :id="$options.tagMessageId" v-model="tagMessage" />
<template #description>
<gl-sprintf :message="$options.translations.annotatedTag.description">
<template #link="{ content }">
<gl-link
:href="$options.gitTagDocsLink"
rel="noopener noreferrer"
target="_blank"
>{{ content }}</gl-link
>
</template>
</gl-sprintf>
</template>
</gl-form-group>
</div>
</gl-collapse>
</div>
</template>

View File

@ -0,0 +1,121 @@
<script>
import { GlButton, GlDropdownItem, GlSearchBoxByType, GlSprintf } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';
import { debounce } from 'lodash';
import { REF_TYPE_TAGS, SEARCH_DEBOUNCE_MS } from '~/ref/constants';
import { __, s__ } from '~/locale';
export default {
components: {
GlButton,
GlDropdownItem,
GlSearchBoxByType,
GlSprintf,
},
model: {
prop: 'query',
event: 'change',
},
props: {
query: {
type: String,
required: false,
default: '',
},
},
data() {
return { tagName: '' };
},
computed: {
...mapState('ref', ['matches']),
...mapState('editNew', ['projectId', 'release']),
tags() {
return this.matches?.tags?.list || [];
},
createText() {
return this.query ? this.$options.i18n.createTag : this.$options.i18n.typeNew;
},
selectedNotShown() {
return this.release.tagName && !this.tags.some((tag) => tag.name === this.release.tagName);
},
},
created() {
this.debouncedSearch = debounce(this.search, SEARCH_DEBOUNCE_MS);
},
mounted() {
this.setProjectId(this.projectId);
this.setEnabledRefTypes([REF_TYPE_TAGS]);
this.search(this.query);
},
methods: {
...mapActions('ref', ['setEnabledRefTypes', 'setProjectId', 'search']),
onSearchBoxInput(searchQuery = '') {
const query = searchQuery.trim();
this.$emit('change', query);
this.debouncedSearch(query);
},
selected(tagName) {
return (this.release?.tagName ?? '') === tagName;
},
},
i18n: {
noResults: __('No results found'),
createTag: s__('Release|Create tag %{tag}'),
typeNew: s__('Release|Or type a new tag name'),
},
};
</script>
<template>
<div data-testid="tag-name-search">
<gl-search-box-by-type
:value="query"
class="gl-border-b-solid gl-border-b-1 gl-border-gray-200"
borderless
autofocus
@input="onSearchBoxInput"
/>
<div class="gl-overflow-y-auto release-tag-list">
<div v-if="tags.length || release.tagName">
<gl-dropdown-item
v-if="selectedNotShown"
is-checked
is-check-item
class="gl-list-style-none"
>
{{ release.tagName }}
</gl-dropdown-item>
<gl-dropdown-item
v-for="tag in tags"
:key="tag.name"
:is-checked="selected(tag.name)"
is-check-item
class="gl-list-style-none"
@click="$emit('select', tag.name)"
>
{{ tag.name }}
</gl-dropdown-item>
</div>
<div
v-else
class="gl-my-5 gl-text-gray-500 gl-display-flex gl-font-base gl-justify-content-center"
>
{{ $options.i18n.noResults }}
</div>
</div>
<div class="gl-border-t-solid gl-border-t-1 gl-border-gray-200 gl-py-3">
<gl-button
category="tertiary"
class="gl-justify-content-start! gl-rounded-0!"
block
:disabled="!query"
@click="$emit('create', query)"
>
<gl-sprintf :message="createText">
<template #tag>
<span class="gl-font-weight-bold">{{ query }}</span>
</template>
</gl-sprintf>
</gl-button>
</div>
</div>
</template>
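
`onSearchBoxInput` re-emits every keystroke immediately (keeping the parent's `v-model` live) but defers the Vuex `search` action behind lodash `debounce`, created per instance in `created()` so component instances don't share one timer. A condensed sketch of that split:

```javascript
import { debounce } from 'lodash';

const DEBOUNCE_MS = 250; // assumption; the component uses SEARCH_DEBOUNCE_MS from ~/ref/constants

function makeSearchInput(emitChange, search) {
  // One debounced wrapper per instance, mirroring the created() hook above.
  const debouncedSearch = debounce(search, DEBOUNCE_MS);
  return (raw = '') => {
    const query = raw.trim();
    emitChange(query); // v-model updates on every keystroke
    debouncedSearch(query); // the API call fires at most once per quiet period
  };
}
```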

View File

@ -1,5 +1,6 @@
import Vue from 'vue';
import Vuex from 'vuex';
import { createRefModule } from '../ref/stores';
import ReleaseEditNewApp from './components/app_edit_new.vue';
import createStore from './stores';
import createEditNewModule from './stores/modules/edit_new';
@ -12,6 +13,7 @@ export default () => {
const store = createStore({
modules: {
editNew: createEditNewModule({ ...el.dataset, isExistingRelease: false }),
ref: createRefModule(),
},
});

View File

@ -274,3 +274,9 @@ export const deleteRelease = ({ commit, getters, dispatch, state }) => {
});
});
};
export const setSearching = ({ commit }) => commit(types.SET_SEARCHING);
export const setCreating = ({ commit }) => commit(types.SET_CREATING);
export const setExistingTag = ({ commit }) => commit(types.SET_EXISTING_TAG);
export const setNewTag = ({ commit }) => commit(types.SET_NEW_TAG);

View File

@ -0,0 +1,4 @@
export const SEARCH = 'SEARCH';
export const CREATE = 'CREATE';
export const EXISTING_TAG = 'EXISTING_TAG';
export const NEW_TAG = 'NEW_TAG';

View File

@ -4,6 +4,7 @@ import { hasContent } from '~/lib/utils/text_utility';
import { getDuplicateItemsFromArray } from '~/lib/utils/array_utility';
import { validateTag, ValidationResult } from '~/lib/utils/ref_validator';
import { i18n } from '~/releases/constants';
import { SEARCH, CREATE, EXISTING_TAG, NEW_TAG } from './constants';
/**
* @param {Object} link The link to test
@ -183,3 +184,9 @@ export const formattedReleaseNotes = ({
export const releasedAtChanged = ({ originalReleasedAt, release }) =>
originalReleasedAt !== release.releasedAt;
export const isSearching = ({ step }) => step === SEARCH;
export const isCreating = ({ step }) => step === CREATE;
export const isExistingTag = ({ tagStep }) => tagStep === EXISTING_TAG;
export const isNewTag = ({ tagStep }) => tagStep === NEW_TAG;

View File

@ -29,3 +29,9 @@ export const RECEIVE_TAG_NOTES_ERROR = 'RECEIVE_TAG_NOTES_ERROR';
export const UPDATE_INCLUDE_TAG_NOTES = 'UPDATE_INCLUDE_TAG_NOTES';
export const UPDATE_RELEASED_AT = 'UPDATE_RELEASED_AT';
export const SET_SEARCHING = 'SET_SEARCHING';
export const SET_CREATING = 'SET_CREATING';
export const SET_EXISTING_TAG = 'SET_EXISTING_TAG';
export const SET_NEW_TAG = 'SET_NEW_TAG';

View File

@ -1,6 +1,7 @@
import { uniqueId, cloneDeep } from 'lodash';
import { DEFAULT_ASSET_LINK_TYPE } from '../../../constants';
import * as types from './mutation_types';
import { SEARCH, CREATE, EXISTING_TAG, NEW_TAG } from './constants';
const findReleaseLink = (release, id) => {
return release.assets.links.find((l) => l.id === id);
@ -127,4 +128,17 @@ export default {
[types.UPDATE_RELEASED_AT](state, releasedAt) {
state.release.releasedAt = releasedAt;
},
[types.SET_SEARCHING](state) {
state.step = SEARCH;
},
[types.SET_CREATING](state) {
state.step = CREATE;
},
[types.SET_EXISTING_TAG](state) {
state.tagStep = EXISTING_TAG;
},
[types.SET_NEW_TAG](state) {
state.tagStep = NEW_TAG;
},
};

View File

@ -1,3 +1,5 @@
import { SEARCH, EXISTING_TAG } from './constants';
export default ({
isExistingRelease,
projectId,
@ -62,4 +64,6 @@ export default ({
includeTagNotes: false,
existingRelease: null,
originalReleasedAt: new Date(),
step: SEARCH,
tagStep: EXISTING_TAG,
});
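
Taken together, the new `SEARCH`/`CREATE` and `EXISTING_TAG`/`NEW_TAG` pieces form two small state machines over `step` and `tagStep`. A self-contained sketch of how the constants, mutations, getters, and state line up (Vuex 3 syntax, matching this codebase's Vue 2 setup):

```javascript
import Vue from 'vue';
import Vuex from 'vuex';

Vue.use(Vuex);

const SEARCH = 'SEARCH';
const CREATE = 'CREATE';
const EXISTING_TAG = 'EXISTING_TAG';
const NEW_TAG = 'NEW_TAG';

const store = new Vuex.Store({
  state: { step: SEARCH, tagStep: EXISTING_TAG },
  mutations: {
    SET_SEARCHING(state) { state.step = SEARCH; },
    SET_CREATING(state) { state.step = CREATE; },
    SET_EXISTING_TAG(state) { state.tagStep = EXISTING_TAG; },
    SET_NEW_TAG(state) { state.tagStep = NEW_TAG; },
  },
  getters: {
    isSearching: (s) => s.step === SEARCH,
    isCreating: (s) => s.step === CREATE,
    isExistingTag: (s) => s.tagStep === EXISTING_TAG,
    isNewTag: (s) => s.tagStep === NEW_TAG,
  },
});

// The tag popover switches between its search and create panes off these:
store.commit('SET_CREATING');
console.log(store.getters.isCreating); // true
```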

View File

@ -96,7 +96,6 @@ export default {
data() {
return {
isEditing: false,
isSubmitting: false,
};
},
computed: {
@ -182,8 +181,8 @@ export default {
updateDraft(this.autosaveKey, this.note.body);
},
async updateNote(newText) {
this.isSubmitting = true;
try {
this.isEditing = false;
await this.$apollo.mutate({
mutation: updateWorkItemNoteMutation,
variables: {
@ -202,24 +201,12 @@ export default {
},
},
});
/**
* https://gitlab.com/gitlab-org/gitlab/-/issues/388314
*
* Once the form is successfully submitted,
* set isSubmitting to false and clear storage before hiding the form.
* This prevents the comment form from restoring its value when a textarea
* input event is triggered by the meta+enter keyboard shortcut.
*
*/
clearDraft(this.autosaveKey);
this.isEditing = false;
} catch (error) {
updateDraft(this.autosaveKey, newText);
this.isEditing = true;
this.$emit('error', __('Something went wrong when updating a comment. Please try again'));
Sentry.captureException(error);
} finally {
this.isSubmitting = false;
}
},
getNewAssigneesAndWidget() {

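The rework above closes the editor before the mutation resolves and reopens it (restoring the draft) only on failure, instead of tracking an `isSubmitting` flag. A distilled sketch of that optimistic-close-with-rollback pattern (helper names are illustrative):

```javascript
// Illustrative sketch of the pattern in updateNote above.
async function optimisticEdit({ mutate, saveDraft, clearDraft, reportError }, newText) {
  let isEditing = false; // hide the editor immediately
  try {
    await mutate(newText);
    clearDraft(); // success: the draft is no longer needed
  } catch (error) {
    saveDraft(newText); // rollback: keep the user's text
    isEditing = true; // and reopen the editor
    reportError(error);
  }
  return isEditing;
}
```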
View File

@ -10,3 +10,17 @@
min-height: 46px;
}
}
.release-tag-selector {
.popover-body {
padding-left: 0;
padding-right: 0;
padding-bottom: 0;
min-width: $gl-dropdown-width;
max-width: $gl-dropdown-width;
}
.release-tag-list {
max-height: $dropdown-max-height;
}
}

View File

@ -68,6 +68,10 @@ class Admin::ProjectsController < Admin::ApplicationController
result = ::Projects::UpdateService.new(@project, current_user, project_params).execute
if result[:status] == :success
unless Gitlab::Utils.to_boolean(project_params['runner_registration_enabled'])
Ci::Runners::ResetRegistrationTokenService.new(@project, current_user).execute
end
redirect_to [:admin, @project], notice: format(_("Project '%{project_name}' was successfully updated."), project_name: @project.name)
else
render "edit"
@ -103,7 +107,8 @@ class Admin::ProjectsController < Admin::ApplicationController
def allowed_project_params
[
:description,
:name
:name,
:runner_registration_enabled
]
end
end

View File

@ -330,4 +330,17 @@ module BlobHelper
@path.to_s.end_with?(Ci::Pipeline::CONFIG_EXTENSION) ||
@path.to_s == @project.ci_config_path_or_default
end
def vue_blob_app_data(project, blob, ref)
{
blob_path: blob.path,
project_path: project.full_path,
resource_id: project.to_global_id,
user_id: current_user.present? ? current_user.to_global_id : '',
target_branch: project.empty_repo? ? ref : @ref,
original_branch: @ref
}
end
end
BlobHelper.prepend_mod_with('BlobHelper')

View File

@ -847,4 +847,12 @@ def can_admin_group_clusters?(project)
project.group && project.group.clusters.any? && can?(current_user, :admin_cluster, project.group)
end
def can_view_branch_rules?
can?(current_user, :maintainer_access, @project)
end
def branch_rules_path
project_settings_repository_path(@project, anchor: 'js-branch-rules')
end
ProjectsHelper.prepend_mod_with('ProjectsHelper')

View File

@ -26,8 +26,7 @@ module Ci
mode: :per_attribute_iv,
algorithm: 'aes-256-gcm',
key: Settings.attr_encrypted_db_key_base_32,
encode: false,
encode_vi: false
encode: false
before_validation :set_default_values

View File

@ -497,6 +497,7 @@ class Project < ApplicationRecord
to: :project_setting, allow_nil: true
delegate :show_diff_preview_in_email, :show_diff_preview_in_email=, :show_diff_preview_in_email?,
:runner_registration_enabled, :runner_registration_enabled?, :runner_registration_enabled=,
to: :project_setting
delegate :squash_always?, :squash_never?, :squash_enabled_by_default?, :squash_readonly?, to: :project_setting

View File

@ -65,6 +65,10 @@ class ProjectSetting < ApplicationRecord
end
strong_memoize_attr :show_diff_preview_in_email?
def runner_registration_enabled
Gitlab::CurrentSettings.valid_runner_registrars.include?('project') && read_attribute(:runner_registration_enabled)
end
private
def validates_mr_default_target_self

View File

@ -222,8 +222,8 @@ class ProjectPolicy < BasePolicy
condition(:"#{f}_disabled", score: 32) { !access_allowed_to?(f.to_sym) }
end
condition(:project_runner_registration_allowed) do
Gitlab::CurrentSettings.valid_runner_registrars.include?('project')
condition(:project_runner_registration_allowed, scope: :subject) do
Gitlab::CurrentSettings.valid_runner_registrars.include?('project') && @subject.runner_registration_enabled
end
condition :registry_enabled do

View File

@ -120,6 +120,8 @@ module Projects
def remove_unallowed_params
params.delete(:emails_disabled) unless can?(current_user, :set_emails_disabled, project)
params.delete(:runner_registration_enabled) if Gitlab::CurrentSettings.valid_runner_registrars.exclude?('project')
end
def after_update

View File

@ -2,7 +2,7 @@
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Import failure external identifiers",
"type": "object",
"maxProperties": 3,
"maxProperties": 4,
"patternProperties": {
".*": {
"oneOf": [

View File

@ -17,6 +17,21 @@
= f.label :description, _('Project description (optional)')
= f.text_area :description, class: 'form-control gl-form-input gl-form-textarea gl-lg-form-input-xl', rows: 5
= render ::Layouts::HorizontalSectionComponent.new(options: { class: 'gl-pb-3 gl-mb-6' }) do |c|
= c.title { _('Permissions and project features') }
= c.description do
= _('Configure advanced permissions')
= c.body do
- if @project.project_setting.present?
.form-group.gl-form-group
%legend.col-form-label.col-form-label
= s_('Runners|Runner Registration')
- all_disabled = Gitlab::CurrentSettings.valid_runner_registrars.exclude?('project')
= f.gitlab_ui_checkbox_component :runner_registration_enabled,
s_('Runners|New project runners can be registered'),
checkbox_options: { checked: @project.runner_registration_enabled, disabled: all_disabled },
help_text: html_escape_once(s_('Runners|Existing runners are not affected. To permit runner registration for all projects, enable this setting in the Admin Area in Settings &gt; CI/CD.')).html_safe
.gl-mt-5
= f.submit _('Save changes'), pajamas_button: true
= render Pajamas::ButtonComponent.new(href: admin_project_path(@project)) do

View File

@ -10,7 +10,7 @@
.info-well.gl-display-none.gl-sm-display-flex.project-last-commit.gl-flex-direction-column.gl-mt-5
#js-last-commit.gl-m-auto
= gl_loading_icon(size: 'md')
#js-code-owners{ data: { branch: @ref, branch_rules_path: project_settings_repository_path(project, anchor: 'js-branch-rules') } }
#js-code-owners{ data: { branch: @ref, can_view_branch_rules: can_view_branch_rules?, branch_rules_path: branch_rules_path } }
.nav-block.gl-display-flex.gl-xs-flex-direction-column.gl-align-items-stretch
= render 'projects/tree/tree_header', tree: @tree, is_project_overview: is_project_overview

View File

@ -10,7 +10,7 @@
%ul.blob-commit-info
= render 'projects/commits/commit', commit: @last_commit, project: @project, ref: @ref
#js-code-owners{ data: { blob_path: blob.path, project_path: @project.full_path, branch: @ref, branch_rules_path: project_settings_repository_path(project, anchor: 'js-branch-rules') } }
#js-code-owners{ data: { blob_path: blob.path, project_path: @project.full_path, branch: @ref, can_view_branch_rules: can_view_branch_rules?, branch_rules_path: branch_rules_path } }
= render "projects/blob/auxiliary_viewer", blob: blob
- if project.forked?
@ -22,12 +22,7 @@
- if !expanded
-# Data info will be removed once we migrate this to use GraphQL
-# Follow-up issue: https://gitlab.com/gitlab-org/gitlab/-/issues/330406
#js-view-blob-app{ data: { blob_path: blob.path,
project_path: @project.full_path,
resource_id: @project.to_global_id,
user_id: current_user.present? ? current_user.to_global_id : '',
target_branch: project.empty_repo? ? ref : @ref,
original_branch: @ref } }
#js-view-blob-app{ data: vue_blob_app_data(project, blob, ref) }
= gl_loading_icon(size: 'md')
- else
%article.file-holder

View File

@ -1,3 +1,4 @@
- page_title s_('Releases|New Release')
- add_page_specific_style 'page_bundles/releases'
#js-new-release-page{ data: data_for_new_release_page }

View File

@ -134,6 +134,8 @@ module Gitlab
end
def add_identifiers_to_failure(failure, external_identifiers)
external_identifiers[:object_type] = object_type
failure.update_column(:external_identifiers, external_identifiers)
end
end

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/399146
milestone: '15.11'
type: development
group: group::pipeline authoring
default_enabled: false
default_enabled: true

View File

@ -1,8 +1,8 @@
---
name: create_runner_machine
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/109983
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/390261
milestone: '15.9'
name: read_fingerprints_from_uploaded_file_in_maven_upload
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/116236
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/404988
milestone: '15.11'
type: development
group: group::runner
group: group::package registry
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: runner_machine_heartbeat
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114859
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/390261
milestone: '15.10'
type: development
group: group::runner
default_enabled: false

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddRunnerRegistrationEnabledToProjectSettings < Gitlab::Database::Migration[2.1]
enable_lock_retries!
def change
add_column :project_settings, :runner_registration_enabled, :boolean, default: true
end
end

View File

@ -0,0 +1 @@
8af1f290454aad1e131626a8dab575dfef11ae73581e57b7d7cb1b431fd06737

View File

@ -21071,6 +21071,7 @@ CREATE TABLE project_settings (
emails_enabled boolean DEFAULT true NOT NULL,
pages_unique_domain_enabled boolean DEFAULT false NOT NULL,
pages_unique_domain text,
runner_registration_enabled boolean DEFAULT true,
CONSTRAINT check_1a30456322 CHECK ((char_length(pages_unique_domain) <= 63)),
CONSTRAINT check_2981f15877 CHECK ((char_length(jitsu_key) <= 100)),
CONSTRAINT check_3a03e7557a CHECK ((char_length(previous_default_branch) <= 4096)),

View File

@ -2208,18 +2208,19 @@ it again.
POST /user/runners
```
| Attribute | Type | Required | Description |
|--------------------|--------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|
| `runner_type` | string | yes | Specifies the scope of the runner; `instance_type`, `group_type`, or `project_type`. |
| `namespace_id` | integer | no | The ID of the project or group that the runner is created in. Required if `runner_type` is `group_type` or `project_type`. |
| `description` | string | no | Description of the runner. |
| `paused` | boolean | no | Specifies if the runner should ignore new jobs. |
| `locked` | boolean | no | Specifies if the runner should be locked for the current project. |
| `run_untagged` | boolean | no | Specifies if the runner should handle untagged jobs. |
| `tag_list` | string array | no | A list of runner tags. |
| `access_level` | string | no | The access level of the runner; `not_protected` or `ref_protected`. |
| `maximum_timeout` | integer | no | Maximum timeout that limits the amount of time (in seconds) that runners can run jobs. |
| `maintenance_note` | string | no | Free-form maintenance notes for the runner (1024 characters). |
| Attribute | Type | Required | Description |
|--------------------|--------------|----------|---------------------------------------------------------------------------------------------------|
| `runner_type` | string | yes | Specifies the scope of the runner; `instance_type`, `group_type`, or `project_type`. |
| `group_id` | integer | no | The ID of the group that the runner is created in. Required if `runner_type` is `group_type`. |
| `project_id` | integer | no | The ID of the project that the runner is created in. Required if `runner_type` is `project_type`. |
| `description` | string | no | Description of the runner. |
| `paused` | boolean | no | Specifies if the runner should ignore new jobs. |
| `locked` | boolean | no | Specifies if the runner should be locked for the current project. |
| `run_untagged` | boolean | no | Specifies if the runner should handle untagged jobs. |
| `tag_list` | string array | no | A list of runner tags. |
| `access_level` | string | no | The access level of the runner; `not_protected` or `ref_protected`. |
| `maximum_timeout` | integer | no | Maximum timeout that limits the amount of time (in seconds) that runners can run jobs. |
| `maintenance_note` | string | no | Free-form maintenance notes for the runner (1024 characters). |
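For illustration, a minimal Ruby sketch of creating a project runner with the new `project_id` attribute might look like the following (the host, token variable, and project ID are placeholders, not documented values):

```ruby
# Minimal sketch: create a project runner via POST /user/runners.
# Host, token variable, and project ID below are placeholders.
require 'net/http'
require 'uri'
require 'json'

uri = URI('https://gitlab.example.com/api/v4/user/runners')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')
request.set_form_data('runner_type' => 'project_type', 'project_id' => '42')

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
  http.request(request)
end

puts JSON.parse(response.body) # details of the newly created runner
```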
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --data "runner_type=instance_type" \

Binary image added (134 KiB).

View File

@ -0,0 +1,289 @@
---
status: proposed
creation-date: "2023-01-10"
authors: [ "@ankitbhatnagar", "@ahegyi", "@mikolaj_wawrzyniak" ]
coach: "@grzesiek"
approvers: [ "@nhxnguyen", "@stkerr" ]
owning-stage: "~workinggroup::clickhouse"
participating-stages: [ "~section::ops", "~section::dev" ]
---
# Scalable data ingestion abstraction for ClickHouse
## Table of Contents
- [Summary](#summary)
- [Why](#why)
- [How](#how)
- [Motivation](#motivation)
- [Case Studies](#case-studies)
- [Replicating existing data into ClickHouse](#1-replicating-existing-data-into-clickhouse)
- [Ingesting large volumes of data into ClickHouse](#2-ingesting-large-volumes-of-data-into-clickhouse)
- [Goals](#goals)
- [Non-goals](#non-goals)
- [General considerations](#general-considerations)
- [Challenges building this](#major-challenges-around-building-such-a-capability)
- [Proposed solution](#proposed-solution)
- [Design & Implementation](#design--implementation)
- [References](#references)
## Summary
Develop a scalable & reliable data ingestion abstraction to help efficiently ingest large volumes of data from high throughput systems into ClickHouse.
### Why
To enable any application at GitLab to write necessary data into ClickHouse regardless of the scale at which they generate data today, or in the future. Refer to [Motivation](#motivation) for why ClickHouse in the first place.
### How
By building a write abstraction (API/library) that allows a user to write data into ClickHouse, with all necessary configurations, conventions, and best practices (instrumentation, service discovery, and so on) built in out of the box.
## Motivation
ClickHouse is an online analytical processing (OLAP) database that powers use-cases requiring fetching real-time, aggregated data that does not mutate a lot. ClickHouse is highly performant and can scale to large volumes of data compared to traditional transactional relational (OLTP) databases such as Postgres and MySQL. For further reading around ClickHouse's capabilities, see [[1]](https://about.gitlab.com/blog/2022/04/29/two-sizes-fit-most-postgresql-and-clickhouse/), [[2]](https://clickhouse.com/blog/migrating-data-between-clickhouse-postgres) and [[3]](https://posthog.com/blog/clickhouse-vs-postgres).
At GitLab, [our current and future ClickHouse uses/capabilities](https://gitlab.com/groups/gitlab-com/-/epics/2075) reference & describe multiple use-cases that could be facilitated by using ClickHouse as a backing datastore. A majority of these talk about the following two major areas of concern:
1. Being able to leverage [ClickHouse's OLAP capabilities](https://clickhouse.com/docs/en/faq/general/olap/) enabling underlying systems to perform an aggregated analysis of data, both over short and long periods of time.
1. The fact that executing these operations with our currently existing datasets primarily in Postgres, is starting to become challenging and non-performant.
Looking forward, as our applications produce larger volumes of data at increasing rates, the ability to ingest it into a *more* capable system, both effectively and efficiently, helps us scale our applications and prepare for business growth.
## Case studies
From an initial assessment of all (reported) use-cases that intend to utilise ClickHouse, the following broad patterns of usage can be observed:
1. Efficiently replicating existing data from other databases into ClickHouse, most prominently Postgres.
1. Directly ingesting large volumes of data into ClickHouse for asynchronous processing, data aggregation & analysis.
The following section(s) explain details of each problem-domain:
### 1. Replicating existing data into ClickHouse
With due reference to our prior work around this, it has been established that logical replication from Postgres is too slow. Instead, we need to be able to emit data change events within database transactions, which can then be processed asynchronously to write or update the corresponding data in ClickHouse (a sketch of this pattern follows below).
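As an illustration only (not existing GitLab code; the model and event table names are hypothetical), such transactional change events could follow an outbox-style pattern, where the event row commits or rolls back together with the data change:

```ruby
# Hypothetical outbox-style sketch: the event row is written in the same
# database transaction as the change itself, so it only becomes visible if
# the change commits. A separate worker can later read these rows and ship
# them to ClickHouse (or an intermediate queue).
class Issue < ApplicationRecord
  after_save :record_change_event

  private

  def record_change_event
    ChangeEvent.create!(
      record_type: self.class.name,
      record_id: id,
      payload: saved_changes # the attributes changed by this save
    )
  end
end
```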
The following case-studies describe how these groups intend to solve the underlying problem:
- ~group::optimize has been working towards a scalable PostgreSQL data replication strategy which can be implemented on the application layer.
- [Proposal: Scalable data sync/replication strategy](https://gitlab.com/gitlab-org/gitlab/-/issues/382172) talks about such a strategy and the additional challenges with using Sidekiq for queueing/batching needs.
- It has been observed that pumping data from `PostgreSQL` into `ClickHouse` directly might not be the right way to approach the problem at hand.
- In addition to the problems described above, another class of problems when replicating data across systems is the handling of data backfill and/or data migrations that happen upstream.
- [group::data](https://about.gitlab.com/handbook/business-technology/data-team/) has been working on syncing data from some of our Postgres databases into a Snowflake-based data warehouse. See this issue for the options considered before designing the current system: [List down all possible options for postgres to snowflake pipeline](https://gitlab.com/gitlab-data/gitlab.com-saas-data-pipeline/-/issues/13).
- With the work done around our [Next Gen GitLab SaaS Data Pipeline](https://docs.google.com/presentation/d/1hVaCY42YhaO5UvgLzp3mbuMYJIFuTFYFJjdhixFTxPE/edit#slide=id.g143a48de8a3_0_0), the data team owns a "custom" pipeline that does incremental data extractions based on an `updated_at` timestamp column. This helps import a significant subset of operational database relations into Snowflake data-warehouse.
- As the volume of data grows, we can foresee this (ETL) pipeline warranting more time and resources to execute, resulting in delays between data being produced and data being available in the Snowflake data warehouse.
- We might also see data inconsistency/incompleteness issues emanating from the current setup, since row deletions are not transferred into Snowflake, inflating data volume and skewing analysis. Any information about multiple updates happening within an import interval is also lost.
- Having a scalable ingestion pipeline that can help replicate data from our databases into an intermediate system and/or ClickHouse in near real-time would help improve the operational characteristics around this system.
### 2. Ingesting large volumes of data into ClickHouse
We need to be able to ingest large volumes of potentially unaggregated data into ClickHouse, which may result in a large number of small writes as well. This can have an adverse effect on how ClickHouse processes and stores incoming data. To mitigate this problem, we need to queue and batch smaller writes into larger ones to keep the ingestion pipeline efficient at all times.
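A minimal sketch of what such application-local queueing and batching could look like (illustrative only, not the proposed interface):

```ruby
# Illustration only; not the proposed GitLab interface. Buffers small writes
# and flushes them as one larger insert when a size or age threshold is hit.
class BatchingWriter
  def initialize(flush_size: 1_000, flush_interval_seconds: 5, &sink)
    @rows = []
    @mutex = Mutex.new
    @flush_size = flush_size
    @flush_interval_seconds = flush_interval_seconds
    @last_flush = Time.now
    @sink = sink # receives an Array of rows, e.g. one ClickHouse bulk INSERT
  end

  def write(row)
    batch = nil
    @mutex.synchronize do
      @rows << row
      if @rows.size >= @flush_size || Time.now - @last_flush >= @flush_interval_seconds
        batch = @rows
        @rows = []
        @last_flush = Time.now
      end
    end
    @sink.call(batch) if batch
  end
end

# Example: collect rows and flush them in batches of up to 1,000.
writer = BatchingWriter.new { |rows| puts "flushing #{rows.size} rows" }
10_000.times { |i| writer.write(id: i) }
```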
The following case-studies describe how each group intends to solve the underlying problem:
- ~group::observability explains their need to ingest large amounts of data into ClickHouse in the following two issues:
- [Proposal: GitLab Observability Platform - Data Ingestion](https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/1878) talks about using an external events store, such as Kafka, to first ingest data as received from users and then write it into ClickHouse in larger batches, avoiding many small writes that would hamper write performance, given how ClickHouse `MergeTree` processes ingested data.
- In addition, [ClickHouse: Investigate client-side buffering to batch writes into ClickHouse](https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2044) talks about their experimentation with using application-local queueing/batching to work around the problems mentioned above.
- ~"group::product intelligence" has been working on building our analytics offering and recently looking at building and/or improving parts of the system.
- [Product Analytics Collector Component](https://gitlab.com/groups/gitlab-org/-/epics/9346) talks about replacing Jitsu with Snowplow for collecting and processing tracking events. For more details of the proposal, see [Jitsu replacement](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/proposals/-/blob/62d332baf5701810d9e7a0b2c00df18431e82f22/doc/jitsu_replacement.md).
- The initial design was prototyped with [Snowplow as Jitsu Replacement PoC](https://gitlab.com/gitlab-org/analytics-section/product-analytics/devkit/-/merge_requests/37).
- From the design, it is easy to observe how large amounts of data will be ingested into ClickHouse and could potentially benefit from the use of a scalable ingestion pipeline.
## Goals
### Well-defined, established client abstractions
We want to define and establish a fully-functional application-side abstraction that can help ingest data into ClickHouse without getting in the way of how an application itself is designed while keeping the underlying code backend-agnostic. The proposed abstraction should become the default choice for any applications, core or satellite, at GitLab.
### Support for high throughput in volume of writes
A solution here should enable an application to write any number of inserts (on the order of up to 1,000-5,000 writes per second) to the underlying database efficiently, while also allowing for growth as the application scales out. Considering how ClickHouse processes incoming writes, a proposed solution should be able to batch many very small writes into larger batches.
### Reliable, consistent delivery of data
A solution here should also ensure reliable and consistent delivery of ingested data into the underlying database, minimising undue loss of data before it is eventually persisted into ClickHouse.
## Non-goals
### Addressing data types, schemas or formats
At this stage of this proposal, we're not addressing which data types, schemas, or formats ingested data arrives in. That should be delegated to the backend-specific implementations themselves and not handled within the write abstraction.
### Addressing where our data sources exist today
We're also not addressing any client-side specific details in the design at this point. The write abstraction should remain only a tool for the language in which it is written. As long as an application can use it to write data as it would any other third-party library, we should be good to build on top of it.
## General Considerations
Having addressed the details of the two aforementioned problem-domains, we can model a proposed solution with the following logical structure:
- Ingestion
- APIs/SDKs
- HTTP2/gRPC Sidecar
- Transport & Routing
- Multi-destination
- Digestion/Compute
- Enrichment
- Processing
- Persisting
## Major challenges around building such a capability
### Self-managed environments
The single biggest challenge around introducing ClickHouse and related systems would be the ability to make it available to our users running GitLab in self-managed environments. The intended goals of this proposal are intentionally kept within those constraints. It is also prudent to establish that what we're *proposing* here is applicable to applications consuming ClickHouse from inside self-managed environments.
There are ongoing efforts to streamline distribution and deployment of ClickHouse instances for managed environments within the larger scope of [ClickHouse Usage at GitLab](../../clickhouse_usage/index.md). A few other issues tackling parts of the aforementioned problem are:
- [Research and understand component costs and maintenance requirements of running a ClickHouse instance with GitLab](https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/14384)
- [ClickHouse maintenance and cost research](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/116669)
### Wide variety of data sources, their structures & usage patterns
The data that we intend to ingest into ClickHouse can come from a wide variety of data sources and be structured in different schemas or formats. With that considered, it is a non-trivial effort to draft a solution that satisfies all use cases efficiently.
Should we decide to build an intermediate ingestion system, any solution should help provide a source/schema/format-agnostic data transport layer with an established, mature client abstraction to maximise the number of applications that can use it.
### Building on top of our current database infrastructure
Our current database infrastructure operates at a fairly large scale, and adding more applications that continuously read/write against it adds to the pressure on the existing resources. It's important that we move out any workloads and/or datasets that can be safely processed in a different context altogether.
### Service Discovery
We're still normalising the details around distribution and deployment of ClickHouse clusters and/or instances for our applications. Subject to how we end up doing it, the ability for a client to discover which ClickHouse cluster, shard, or table to write to would need to become a part of any such solution.
## Proposed Solution
In light of the problems discussed earlier, it is in our best interests to allow the use of an external, intermediate system, subject to an application's needs, especially around the volume and scale of data being written from the application into ClickHouse.
Therefore, we intend to develop an abstraction that can enable an application to store data into ClickHouse regardless of the scale that they (currently) operate at. It also:
- Facilitates an application to switch from one *technology* to another should their performance and/or scale requirements change over time.
- Allows for backend-specific conventions, configurations & best practices such as instrumentation, service-discovery, etc. to be encoded in one place for all applications to leverage consistently.
## Design & Implementation
### Core assumptions
- We're only going to focus on writing data into ClickHouse, as mentioned in the aforementioned non-goals. This document (intentionally) does not address where the data comes from, only how it lands in ClickHouse. Those details are delegated to the applications generating the data; that is, as long as they can consume this abstraction, they should be able to write data into ClickHouse.
- We're going to delegate the choice of different storage backends to a follow-up blueprint or epic, since that's outside the scope of this design. With ClickHouse as the eventual destination for our data, this document only talks about writing data into it, either directly or indirectly via a queueing/batching system.
### Architecture
![Architecture](clickhouse_dbwriter.png)
Having an abstraction around writing data helps client-side instrumentation stay backend-agnostic, allowing applications to switch code paths depending on where they run.
An example setup should look like:
```ruby
Gitlab::Database::Writer.config do |config|
  #
  # when using sync mode, data gets written directly into ClickHouse,
  # therefore, it's also assumed the backend here is ClickHouse
  config.mode = :sync
  config.backend = :clickhouse # optional
  # OR
  #
  # when using async mode, data is written to an intermediate system
  # first, then written into ClickHouse asynchronously
  config.mode = :async
  config.backend = :pubsub # or :kafka, or another supported backend
  #
  # then backend-specific configurations hereafter
  #
  config.url = 'tcp://user:pwd@localhost:9000/database'
  # e.g. a serializer helps define how data travels over the wire
  config.json_serializer = ClickHouse::Serializer::JsonSerializer
  # ...
end

# do application-specific processing
# eventually, write data using the object you just built
Gitlab::Database::Writer.write(
  Gitlab::Database::Model::MyTable,
  [{ id: 1, foo: 'bar' }]
)
```
We intend to keep `Gitlab::Database::Writer.backend` to be as close to the backend-specific client implementation as possible. Having a wrapper around a vanilla client helps us address peripheral concerns such as service-discovery for the backends while still allowing the user to leverage features of a given client.
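As a rough illustration of that intent (hypothetical class and method names, not a committed design), such a wrapper could stay close to the vanilla client while hosting the peripheral concerns:

```ruby
# Hypothetical sketch of a thin backend wrapper; all names are illustrative.
module Gitlab
  module Database
    module Writer
      class ClickHouseBackend
        def initialize(config, client:)
          @config = config
          @client = client # a vanilla ClickHouse client, usable as-is
        end

        # Peripheral concerns such as service discovery live here; the
        # actual write is delegated to the underlying client untouched.
        def write(model, rows)
          endpoint = resolve_endpoint(@config.url)
          @client.insert(model.table_name, rows, endpoint: endpoint)
        end

        private

        # Placeholder for cluster/shard discovery once those details settle.
        def resolve_endpoint(url)
          url
        end
      end
    end
  end
end
```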
### Iterations
Considering the large scope of this undertaking and the need for feedback around actual usage, we intend to build the proposed abstraction(s) across multiple iterations which can be described as follows:
#### Iteration 1 - Develop write abstraction with sync mode enabled
First, research and develop a simple write abstraction that our users can begin to use to write data into ClickHouse. This ensures our choice of the underlying client is well-researched and suffices to fulfill needs of as many reported use-cases as possible. Being able to see this running would help gather user-feedback and improve the write APIs/interfaces accordingly.
Given this feedback and more development with how we aim to deploy ClickHouse across our environments, it'd then be prudent to build into this abstraction the necessary conventions and best practices, and abstract away details around connection-pooling, service-discovery, and so on.
#### Iteration 2 - Add support for schemas & data validation
In the next iteration, we plan to add support for schema usage and validation. This helps keep model definitions sane and allows for validating data to be inserted.
#### Iteration 3 - Add support for async mode, PoC with one backend
With the above two iterations well-executed, we can start to scale up our write abstractions, adding support for writing data into intermediate data stores before writing it into ClickHouse asynchronously. We aim to prototype such an implementation with at least one such backend.
#### Further iterations
With a backend-agnostic abstraction becoming the ingestion interface a client interacts with, there are various other use cases that can be solved from within this abstraction. Some of them are:
- Zero-configuration data ingestion from multiple sources
- Dynamically enriching data from multiple sources
- Offloading data to long-term retention data stores
### Possible backend implementations
- Applications writing directly to ClickHouse
- Application-local in-memory queueing/batching of data
- Application-local persistent queueing/batching of data
- Non-local queueing/batching of data before eventually writing into ClickHouse
- Managed cloud backends:
- [Google PubSub](https://cloud.google.com/pubsub)
- [AWS Kinesis](https://aws.amazon.com/kinesis/)
- Self-managed backends:
- [CHProxy](https://www.chproxy.org/)
- [Kafka](https://kafka.apache.org/)
- [RedPanda](https://redpanda.com/)
- [Vector](https://vector.dev/)
- [RabbitMQ](https://www.rabbitmq.com/)
### Additional complexity when using a non-local backend
- The need for running an additional process/sub-system that reads data from the concerned backend and writes it into ClickHouse efficiently and reliably.
- The additional hop across the backend also means that there might be potential delays in how soon this data lands into ClickHouse.
Though the points above describe additional complexity for an application, they can be treated as valid trade-offs for applications that need data ingestion at scale.
### Comparing backends across multiple dimensions
| Dimension | CHProxy | Redis | Google PubSub | Apache Kafka |
|---|---|---|---|---|
| Operations | Trivial | Trivial | Managed | Non-trivial, complex |
| Data Retention | Non-durable | Non-durable | Durable | Durable |
| Performance | Good | Good | High | High |
| Data Streaming | None | Minimal | Good | Best |
| Suitable for self-managed environments | Trivial | Trivial | - | Complex |
## References
- [ClickHouse use-cases within Manage](https://gitlab.com/groups/gitlab-org/-/epics/7964)
- [List down all possible options for postgres to snowflake pipeline](https://gitlab.com/gitlab-data/gitlab.com-saas-data-pipeline/-/issues/13)
- [Design Spike for Snowplow For Data Event capture](https://gitlab.com/gitlab-data/analytics/-/issues/12397)
- [Audit Events Performance Limits](https://gitlab.com/gitlab-org/gitlab/-/issues/375545)

View File

@ -567,6 +567,53 @@ Example
});
```
### Testing local-only Apollo queries and mutations
To add a new query or mutation before it is added to the backend, we can use the `@client` directive. For example:
```graphql
mutation setActiveBoardItemEE($boardItem: LocalBoardItem, $isIssue: Boolean = true) {
setActiveBoardItem(boardItem: $boardItem) @client {
...Issue @include(if: $isIssue)
...EpicDetailed @skip(if: $isIssue)
}
}
```
When writing test cases for such calls, we can use resolvers to make sure they are called with the correct parameters.
For example, when creating the wrapper, we should make sure the resolver is mapped to the query or mutation.
The mutation we are mocking here is `setActiveBoardItem`:
```javascript
const mockSetActiveBoardItemResolver = jest.fn();
const mockApollo = createMockApollo([], {
Mutation: {
setActiveBoardItem: mockSetActiveBoardItemResolver,
},
});
```
In the following code, we must pass four arguments. The second must be the collection of input variables of the mocked query or mutation.
To test that the mutation is called with the correct parameters:
```javascript
it('calls setActiveBoardItemMutation on close', async () => {
wrapper.findComponent(GlDrawer).vm.$emit('close');
await waitForPromises();
expect(mockSetActiveBoardItemResolver).toHaveBeenCalledWith(
{},
{
boardItem: null,
},
expect.anything(),
expect.anything(),
);
});
```
### Jest best practices
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34209) in GitLab 13.2.

View File

@ -19,7 +19,15 @@ This feature is not ready for production use.
This page is a work in progress, and we're updating the information as we add more features.
For more information, see the [group direction page](https://about.gitlab.com/direction/analytics/product-analytics/).
## How Product Analytics works
## How product analytics works
Product analytics uses several tools:
- [**Jitsu**](https://jitsu.com/docs) - A web and app event collection platform that provides a consistent API to collect user data and pass it through to ClickHouse.
- [**ClickHouse**](https://clickhouse.com/docs) - A database suited to store, query, and retrieve analytical data.
- [**Cube.js**](https://cube.dev/docs/) - An analytical graphing library that provides an API to run queries against the data stored in ClickHouse.
The following diagram illustrates the product analytics flow:
```mermaid
---
@ -42,12 +50,6 @@ flowchart TB
end
```
Product Analytics uses several tools:
- [**Jitsu**](https://jitsu.com/docs) - A web and app event collection platform that provides a consistent API to collect user data and pass it through to Clickhouse.
- [**Clickhouse**](https://clickhouse.com/docs/) - A database suited to store, query, and retrieve analytical data.
- [**Cube.js**](https://cube.dev/docs/) - An analytical graphing library that provides an API to run queries against the data stored in Clickhouse.
## Enable product analytics
> - Introduced in GitLab 15.6 behind the [feature flag](../../administration/feature_flags.md) named `cube_api_proxy`. Disabled by default.
@ -60,8 +62,8 @@ On self-managed GitLab, by default this feature is not available. To make it ava
On GitLab.com, this feature is not available.
This feature is not ready for production use.
You can enable and configure product analytics to track events
within your project applications on a self-managed instance.
To track events in your project applications on a self-managed instance,
you must enable and configure product analytics.
Prerequisite:
@ -96,10 +98,13 @@ On self-managed GitLab, by default this feature is not available. To make it ava
On GitLab.com, this feature is not available.
This feature is not ready for production use.
Each project can define an unlimited number of dashboards. These dashboards are defined using our YAML schema and stored
in the `.gitlab/product_analytics/dashboards/` directory of a project repository. The name of the file is the name of the dashboard, and visualizations are shared across dashboards.
Each project can have an unlimited number of dashboards.
These dashboards are defined using the GitLab YAML schema, and stored in the `.gitlab/product_analytics/dashboards/` directory of a project repository.
The name of the file is the name of the dashboard.
Each dashboard can contain one or more visualizations (charts), which are shared across dashboards.
Project maintainers can enforce approval rules on dashboard changes using features such as code owners and approval rules. Dashboards are versioned in source control with the rest of a project's code.
Project maintainers can enforce approval rules on dashboard changes using features such as code owners and approval rules.
Dashboards are versioned in source control with the rest of a project's code.
### View project dashboards
@ -114,17 +119,25 @@ To view a list of product analytics dashboards for a project:
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Analytics > Dashboards**.
1. From the list of available dashboards, select the dashboard you want to view.
### Define a dashboard
To define a dashboard:
1. In `.gitlab/product_analytics/dashboards/`, create a directory named like the dashboard. Each dashboard should have its own directory.
1. In the new directory, create a `.yaml` file with the same name as the directory. This file contains the dashboard definition, and must conform to the JSON schema defined in `ee/app/validators/json_schemas/analytics_dashboard.json`.
1. In the `.gitlab/product_analytics/dashboards/visualizations/` directory, create a `yaml` file. This file defines the visualization type for the dashboard, and must conform to the schema in
`ee/app/validators/json_schemas/analytics_visualization.json`.
1. In `.gitlab/product_analytics/dashboards/`, create a directory named after the dashboard.
The example below includes three dashboards and one visualization that applies to all dashboards.
Each dashboard should have its own directory.
1. In the new directory, create a `.yaml` file with the same name as the directory.
This file contains the dashboard definition. It must conform to the JSON schema defined in `ee/app/validators/json_schemas/product_analytics_dashboard.json`.
1. In the `.gitlab/product_analytics/dashboards/visualizations/` directory, create a `.yaml` file.
This file defines the visualization type for the dashboard. It must conform to the schema in
`ee/app/validators/json_schemas/product_analytics_visualization.json`.
For example, if you want to create three dashboards (Conversion funnels, Demographic breakdown, and North star metrics)
and one visualization (line chart) that applies to all dashboards, the file structure would be:
```plaintext
.gitlab/product_analytics/dashboards
@ -140,13 +153,13 @@ The example below includes three dashboards and one visualization that applies t
## Funnel analysis
Funnel analysis can be used to understand the flow of users through your application and where
Use funnel analysis to understand the flow of users through your application, and where
users drop out of a predefined flow (for example, a checkout process or ticket purchase).
Each product can also define an unlimited number of funnels.
These funnels are defined using our YAML schema and stored in the `.gitlab/product_analytics/funnels/` directory of a project repository.
Like dashboards, funnels are defined using the GitLab YAML schema, and stored in the `.gitlab/product_analytics/funnels/` directory of a project repository.
Funnel definitions must include the keys `name`, `seconds_to_convert`, and an array of `steps`.
Funnel definitions must include the keys `name` and `seconds_to_convert`, and an array of `steps`.
| Key | Description |
|----------------------|----------------------------------------------------------|
@ -164,6 +177,8 @@ Each step must include the keys `name`, `target`, and `action`.
### Example funnel definition
The following example defines a funnel that tracks users who completed a purchase within one hour by going through three target pages:
```yaml
name: completed_purchase
seconds_to_convert: 3600
@ -216,11 +231,21 @@ The `afterDate` filter is not supported. Please use `beforeDate` or `inDateRange
Exporting the raw event data from the underlying storage engine can help you debug and create datasets for data analysis.
Because Cube acts as an abstraction layer between the raw data and the API, the exported raw data has some caveats:
- Data is grouped by the selected dimensions. Therefore, the exported data might be incomplete unless you include both `utcTime` and `userAnonymousId`.
- Data is by default limited to 10,000 rows, but you can increase the limit to a maximum of 50,000 rows. If your dataset has more than 50,000 rows, you must paginate through the results by using the `limit` and `offset` parameters (see the sketch below).
- Data is always returned in JSON format. If you need it in a different format, you need to convert the JSON to the required format using a scripting language of your choice.
[Issue 391683](https://gitlab.com/gitlab-org/gitlab/-/issues/391683) tracks efforts to implement a more scalable export solution.
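For example, paging through an export with `limit` and `offset` could look roughly like this Ruby sketch (the host, token variable, project ID, dimension names, and response shape are assumptions, not documented values):

```ruby
# Pagination sketch; placeholders and the response shape are assumptions.
require 'net/http'
require 'uri'
require 'json'

uri = URI('https://gitlab.example.com/api/v4/projects/42/product_analytics/request/load?queryType=multi')
limit = 10_000
offset = 0

loop do
  request = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json',
                                     'PRIVATE-TOKEN' => ENV.fetch('GITLAB_TOKEN'))
  request.body = {
    query: {
      dimensions: %w[TrackedEvents.userAnonymousId TrackedEvents.utcTime], # assumed names
      limit: limit,
      offset: offset
    }
  }.to_json

  response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
  rows = JSON.parse(response.body).dig('results', 0, 'data') || [] # shape assumed
  break if rows.size < limit # last page reached

  offset += limit
end
```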
### Export raw data with Cube queries
You can [query the raw data with the REST API](../../api/product_analytics.md#send-query-request-to-cube) and convert the JSON output to any required format.
You can [query the raw data with the REST API](../../api/product_analytics.md#send-query-request-to-cube),
and convert the JSON output to any required format.
You can export the raw data for a specific dimension by passing a list of dimensions to the `dimensions` key. For example, the following query outputs the raw data for the attributes listed:
To export the raw data for a specific dimension, pass a list of dimensions to the `dimensions` key.
For example, the following query outputs the raw data for the attributes listed:
```json
POST /api/v4/projects/PROJECT_ID/product_analytics/request/load?queryType=multi
@ -247,12 +272,3 @@ POST /api/v4/projects/PROJECT_ID/product_analytics/request/load?queryType=multi
```
If the request is successful, the returned JSON includes an array of rows of results.
### Caveats
Because Cube acts as an abstraction layer between the raw data and the API, the exported raw data has some caveats:
- Data is grouped by the selected dimensions. Therefore, the exported data might be incomplete, unless including both `utcTime` and `userAnonymousId`.
- Data is by default limited to 10,000 rows, but you can increase the limit to maximum 50,000 rows. If your dataset has more than 50,000 rows, you need to paginate through the results by using the `limit` and `offset` parameters.
- Data is always returned in JSON format. If you need it in a different format, you need to convert the JSON to the required format using a scripting language of your choice.
- [Issue 391683](https://gitlab.com/gitlab-org/gitlab/-/issues/391683) tracks the implementation of a more scalable export solution.

View File

@ -4,33 +4,32 @@ group: Incubation
info: Machine Learning Experiment Tracking is a GitLab Incubation Engineering program. No technical writer assigned to this group.
---
# MLFlow Client Integration **(FREE)**
# MLFlow client integration **(FREE)**
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/8560) in GitLab 15.6 as an [Experiment](../../../policy/alpha-beta-support.md#experiment) release [with a flag](../../../administration/feature_flags.md) named `ml_experiment_tracking`. Disabled by default.
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/8560) in GitLab 15.11 as an [Experiment](../../../policy/alpha-beta-support.md#experiment) release [with a flag](../../../administration/feature_flags.md) named `ml_experiment_tracking`. Disabled by default.
DISCLAIMER:
MLFlow Client Integration is an experimental feature being developed by the Incubation Engineering Department,
and will receive significant changes over time.
NOTE:
Model experiment tracking is an [experimental feature](../../../policy/alpha-beta-support.md).
Refer to <https://gitlab.com/gitlab-org/gitlab/-/issues/381660> for feedback and feature requests.
[MLFlow](https://mlflow.org/) is one of the most popular open source tools for Machine Learning Experiment Tracking.
GitLabs works as a backend to the MLFlow Client, [logging experiments](../ml/experiment_tracking/index.md).
GitLab works as a backend to the MLFlow Client, [logging experiments](../ml/experiment_tracking/index.md).
Setting up your integrations requires minimal changes to existing code.
GitLab plays the role of proxy server, both for artifact storage and tracking data. It reflects the
MLFlow [Scenario 5](https://www.mlflow.org/docs/latest/tracking.html#scenario-5-mlflow-tracking-server-enabled-with-proxied-artifact-storage-access).
## Enable MLFlow Client Integration
Complete this task to enable MLFlow Client Integration.
## Enable MLFlow client integration
Prerequisites:
- A [personal access token](../../../user/profile/personal_access_tokens.md) for the project, with minimum access level of `api`.
- The project ID. To find the project ID, on the top bar, select **Main menu > Projects** and find your project. On the left sidebar, select **Settings > General**.
1. Set the tracking URI and token environment variables on the host that runs the code (your local environment, CI pipeline, or remote host).
To enable MLFlow client integration:
For example:
1. Set the tracking URI and token environment variables on the host that runs the code.
This can be your local environment, CI pipeline, or remote host. For example:
```shell
export MLFLOW_TRACKING_URI="http://<your gitlab endpoint>/api/v4/projects/<your project id>/ml/mlflow"
@ -39,43 +38,48 @@ Prerequisites:
1. If your training code contains the call to `mlflow.set_tracking_uri()`, remove it.
When running the training code, MLFlow will create experiments, runs, log parameters, metrics,
When running the training code, MLFlow creates experiments and runs, and logs parameters, metrics, metadata,
and artifacts on GitLab.
After experiments are logged, they are listed under `/<your project>/-/ml/experiments`. Runs are registered as Model Candidates,
that can be explored by selecting an experiment.
After experiments are logged, they are listed under `/<your project>/-/ml/experiments`.
Runs are registered as:
- Model Candidates, which can be explored by selecting an experiment.
- Tags, which are registered as metadata.
## Supported MlFlow client methods and caveats
GitLab supports these methods from the MLFlow client. Other methods might be supported but were not
tested. More information can be found in the [MLFlow Documentation](https://www.mlflow.org/docs/1.28.0/python_api/mlflow.html).
| Method | Supported | Version Added | Comments |
|--------------------------|------------------|----------------|----------|
| `get_experiment` | Yes | 15.11 | |
| `get_experiment_by_name` | Yes | 15.11 | |
| `set_experiment` | Yes | 15.11 | |
| `get_run` | Yes | 15.11 | |
| `start_run` | Yes | 15.11 | |
| `log_artifact` | Yes with caveat | 15.11 | (15.11) `artifact_path` must be empty string. Does not support directories.
| `log_artifacts` | Yes with caveat | 15.11 | (15.11) `artifact_path` must be empty string. Does not support directories.
| `log_batch` | Yes | 15.11 | |
| `log_metric` | Yes | 15.11 | |
| `log_metrics` | Yes | 15.11 | |
| `log_param` | Yes | 15.11 | |
| `log_params` | Yes | 15.11 | |
| `log_figure` | Yes | 15.11 | |
| `log_image` | Yes | 15.11 | |
| `log_text` | Yes with caveat | 15.11 | (15.11) Does not support directories.
| `log_dict` | Yes with caveat | 15.11 | (15.11) Does not support directories.
| `set_tag` | Yes | 15.11 | |
| `set_tags` | Yes | 15.11 | |
| `set_terminated` | Yes | 15.11 | |
| `end_run` | Yes | 15.11 | |
| `update_run` | Yes | 15.11 | |
| `log_model` | Partial | 15.11 | (15.11) Saves the artifacts, but not the model data. `artifact_path` must be empty.
## Limitations
- The API GitLab supports is the one defined in MLFlow version 1.28.0.
- API endpoints not listed above are not supported.
- During creation of experiments and runs, tags are ExperimentTags and RunTags are stored, even though they are not displayed.
- During creation of experiments and runs, ExperimentTags are stored, even though they are not displayed.
- MLFlow Model Registry is not supported.
## Supported methods and caveats
This is a list of methods we support from the MLFlow client. Other methods might be supported but were not
tested. More information can be found in the [MLFlow Documentation](https://www.mlflow.org/docs/1.28.0/python_api/mlflow.html).
### `set_experiment()`
Accepts both `experiment_name` and `experiment_id`
### `start_run()`
- Nested runs have not been tested.
- `run_name` is not supported
### `log_param()`, `log_params()`, `log_metric()`, `log_metrics()`
Work as defined by the documentation
### `log_artifact()`, `log_artifacts()`
`artifact_path` must be empty string.
### `log_model()`
This is an experimental method in MLFlow, and partial support is offered. It stores the model artifacts, but does
not log the model information. The `artifact_path` parameter must be set to `''`, because Generic Packages do not support folder
structure.

Binary image added (24 KiB).

Binary image added (56 KiB).

Binary image added (24 KiB).

View File

@ -4,78 +4,86 @@ group: Incubation
info: Machine Learning Experiment Tracking is a GitLab Incubation Engineering program. No technical writer assigned to this group.
---
# Machine Learning Experiment Tracking **(FREE)**
# Machine learning model experiments **(FREE)**
DISCLAIMER:
Machine Learning Experiment Tracking is an experimental feature being developed by the Incubation Engineering Department,
and will receive significant changes over time. This feature is being release with the aim of getting user feedback, but
is not stable and can lead to performance degradation. See below on how to disable this feature.
FLAG:
On self-managed GitLab, model experiment tracking is disabled by default.
To enable the feature, ask an administrator to [enable the feature flag](../../../../administration/feature_flags.md) named `ml_experiment_tracking`.
On GitLab.com, this feature is in private testing only.
When creating machine learning models, data scientists often experiment with different parameters, configurations, feature
engineering, and so on, to improve the performance of the model. Keeping track of all this metadata and the associated
NOTE:
Model experiment tracking is an [experimental feature](../../../../policy/alpha-beta-support.md). Refer to <https://gitlab.com/gitlab-org/gitlab/-/issues/381660> for feedback and feature requests.
When creating machine learning models, data scientists often experiment with different parameters, configurations, and feature
engineering to improve the performance of the model. Keeping track of all this metadata and the associated
artifacts so that the data scientist can later replicate the experiment is not trivial. Machine learning experiment
tracking enables them to log parameters, metrics, and artifacts directly into GitLab, giving easy access later on.
![List of Experiments](img/experiments_v15_7.png)
These features have been proposed:
![Experiment Candidates](img/candidates_v15_7.png)
- Searching experiments.
- Visual comparison of candidates.
- Creating, deleting, and updating experiments through the GitLab UI.
- Creating, deleting, and updating candidates through the GitLab UI.
![Candidate Detail](img/candidate_v15_7.png)
For feature requests, see [epic 9341](https://gitlab.com/groups/gitlab-org/-/epics/9341).
## What is an experiment?
An experiment is a collection of comparable model candidates. Experiments can be long lived (for example, when they represent
a use case), or short lived (results from hyperparameter tuning triggered by a merge request), but usually hold model candidates
that have a similar set of parameters and metrics.
In a project, an experiment is a collection of comparable model candidates.
Experiments can be long-lived (for example, when they represent a use case), or
short-lived (results from hyperparameter tuning triggered by a merge request),
but usually hold model candidates that have a similar set of parameters measured
by the same metrics.
![List of Experiments](img/experiments_v15_11.png)
## Model candidate
A model candidate is a variation of the training of a machine learning model, that can be eventually promoted to a version
of the model. The goal of a data scientist is to find the model candidate whose parameter values lead to the best model
of the model.
![Experiment Candidates](img/candidates_v15_11.png)
The goal of a data scientist is to find the model candidate whose parameter values lead to the best model
performance, as indicated by the given metrics.
Example parameters:
![Candidate Detail](img/candidate_v15_11.png)
- Algorithm (linear regression, decision tree, and so on).
Some example parameters:
- Algorithm (such as linear regression or decision tree).
- Hyperparameters for the algorithm (learning rate, tree depth, number of epochs).
- Features included.
## Usage
## Track new experiments and candidates
### User access management
Experiment and trials can only be tracked through the
[MLFlow](https://www.mlflow.org/docs/latest/tracking.html) client integration.
See [MLFlow client integration](../../integrations/mlflow_client.md) for more information
on how to use GitLab as a backend for the MLFlow Client.
An experiment is always associated to a project. Only users with access to the project an experiment is associated with
can view that experiment data.
## Explore model candidates
### Tracking new experiments and trials
Prerequisites:
Experiment and trials can only be tracked through the [MLFlow](https://www.mlflow.org/docs/latest/tracking.html) client
integration. More information on how to use GitLab as a backend for MLFlow Client can be found [at the documentation page](../../integrations/mlflow_client.md).
- You must have at least the Developer role to view experiment data.
### Exploring model candidates
To list the current active experiments, either go to `https/-/ml/experiments` or:
To list the current active experiments, navigate to `https/-/ml/experiments`. To display all trials
that have been logged, along with their metrics and parameters, select an experiment. To display details for a candidate,
select **Details**.
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Packages & registries > Model experiments**.
1. To display all candidates that have been logged, along with their metrics, parameters, and metadata, select an experiment.
1. To display details for a candidate, select **Details**.
### Logging artifacts
## View log artifacts
Trial artifacts are saved as [generic packages](../../../packages/generic_packages/index.md), and follow all their
conventions. After an artifact is logged for a candidate, all artifacts logged for the candidate are listed in the
package registry. The package name for a candidate is `ml_candidate_<candidate_id>`, with version `-`. The link to the
artifacts can also be accessed from the **Experiment Candidates** list or **Candidate detail**.
limitations. After an artifact is logged for a candidate, all artifacts logged for the candidate are listed in the
package registry. The package name for a candidate is `ml_experiment_<experiment_id>`, where the version is the candidate
IID. The link to the artifacts can also be accessed from the **Experiment Candidates** list or **Candidate detail**.
### Limitations and future
## Related topics
- Searching experiments, searching trials, visual comparison of trials, and creating, deleting and updating experiments and trials through GitLab UI is under development.
## Disabling or enabling the Feature
On self-managed GitLab, ML Experiment Tracking is disabled by default. To enable the feature, ask an administrator to [disable the feature flag](../../../../administration/feature_flags.md) named `ml_experiment_tracking`.
On GitLab.com, this feature is currently on private testing.
## Feedback, roadmap and reports
For updates on the development, refer to the [development epic](https://gitlab.com/groups/gitlab-org/-/epics/8560).
For feedback, bug reports and feature requests, refer to the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/381660).
- Development details in [epic 8560](https://gitlab.com/groups/gitlab-org/-/epics/8560).
- Add feedback in [issue 381660](https://gitlab.com/gitlab-org/gitlab/-/issues/381660).

View File

@ -80,10 +80,11 @@ To create a release in the Releases page:
- Select an existing Git tag. Selecting an existing tag that is already associated with a release
results in a validation error.
- Enter a new Git tag name.
1. From the **Create from** dropdown list, select a branch or commit SHA to use when
1. From the **Create tag** popover, select a branch or commit SHA to use when
creating the new tag.
1. Optional. In the **Set tag message** text box, enter a message to create an
[annotated tag](https://git-scm.com/book/en/v2/Git-Basics-Tagging#_annotated_tags).
1. Select **Save**.
1. Optional. Enter additional information about the release, including:
- [Title](release_fields.md#title).
- [Milestones](#associate-milestones-with-a-release).

View File

@ -53,8 +53,6 @@ module API
end
def current_runner_manager
return if Feature.disabled?(:create_runner_machine)
strong_memoize(:current_runner_manager) do
system_xid = params.fetch(:system_id, LEGACY_SYSTEM_XID)
current_runner&.ensure_manager(system_xid) { |m| m.contacted_at = Time.current }
@ -96,7 +94,7 @@ module API
# the heartbeat should be triggered.
if heartbeat_runner
job.runner&.heartbeat(get_runner_ip)
job.runner_manager&.heartbeat(get_runner_ip) if Feature.enabled?(:runner_machine_heartbeat)
job.runner_manager&.heartbeat(get_runner_ip)
end
job

View File

@ -349,11 +349,14 @@ module API
file: params[:file],
size: params['file.size'],
file_name: file_name,
file_type: params['file.type'],
file_sha1: params['file.sha1'],
file_md5: params['file.md5']
}
if Feature.enabled?(:read_fingerprints_from_uploaded_file_in_maven_upload, user_project)
file_params.merge!(size: params[:file].size, file_sha1: params[:file].sha1, file_md5: params[:file].md5)
end
::Packages::CreatePackageFileService.new(package, file_params.merge(build: current_authenticated_job)).execute
track_package_event('push_package', :maven, project: user_project, namespace: user_project.namespace) if jar_file?(format)
end

View File

@ -1375,9 +1375,14 @@ module API
params do
requires :runner_type, type: String, values: ::Ci::Runner.runner_types.keys,
desc: %q(Specifies the scope of the runner)
given runner_type: ->(runner_type) { %i[group_type project_type].include? runner_type } do
requires :namespace_id, type: Integer,
desc: 'The ID of the project or group that the runner is created in',
given runner_type: ->(runner_type) { runner_type == 'group_type' } do
requires :group_id, type: Integer,
desc: 'The ID of the group that the runner is created in',
documentation: { example: 1 }
end
given runner_type: ->(runner_type) { runner_type == 'project_type' } do
requires :project_id, type: Integer,
desc: 'The ID of the project that the runner is created in',
documentation: { example: 1 }
end
optional :description, type: String, desc: %q(Description of the runner)
@ -1397,18 +1402,15 @@ module API
end
post 'runners', urgency: :low, feature_category: :runner_fleet do
attributes = attributes_for_keys(
%i[runner_type namespace_id description maintenance_note paused locked run_untagged tag_list
%i[runner_type group_id project_id description maintenance_note paused locked run_untagged tag_list
access_level maximum_timeout]
)
namespace_id = attributes.delete(:namespace_id)
if namespace_id
case attributes[:runner_type]
when 'group_type'
attributes[:scope] = ::Group.find(namespace_id)
when 'project_type'
attributes[:scope] = ::Project.find(namespace_id)
end
case attributes[:runner_type]
when 'group_type'
attributes[:scope] = ::Group.find_by_id(attributes.delete(:group_id))
when 'project_type'
attributes[:scope] = ::Project.find_by_id(attributes.delete(:project_id))
end
result = ::Ci::Runners::CreateRunnerService.new(user: current_user, params: attributes).execute

View File

@ -18,8 +18,7 @@ module Gitlab
mode: :per_attribute_iv,
algorithm: 'aes-256-gcm',
key: Settings.attr_encrypted_db_key_base_32,
encode: false,
encode_vi: false
encode: false
before_save :copy_token_to_encrypted_token

View File

@ -27,8 +27,13 @@ module Gitlab
build_record = model.new(attrs)
if build_record.invalid?
log_error(object[:id], build_record.errors.full_messages)
errors << build_record.errors
github_identifiers = github_identifiers(object)
log_error(github_identifiers, build_record.errors.full_messages)
errors << {
validation_errors: build_record.errors,
github_identifiers: github_identifiers
}
next
end
@ -53,17 +58,18 @@ module Gitlab
raise NotImplementedError
end
def bulk_insert_failures(validation_errors)
rows = validation_errors.map do |error|
def bulk_insert_failures(errors)
rows = errors.map do |error|
correlation_id_value = Labkit::Correlation::CorrelationId.current_or_new_id
{
source: self.class.name,
exception_class: 'ActiveRecord::RecordInvalid',
exception_message: error.full_messages.first.truncate(255),
exception_message: error[:validation_errors].full_messages.first.truncate(255),
correlation_id_value: correlation_id_value,
retry_count: nil,
created_at: Time.zone.now
created_at: Time.zone.now,
external_identifiers: error[:github_identifiers]
}
end
@ -88,15 +94,19 @@ module Gitlab
)
end
def log_error(object_id, messages)
def log_error(github_identifiers, messages)
Gitlab::Import::Logger.error(
import_type: :github,
project_id: project.id,
importer: self.class.name,
message: messages,
github_identifier: object_id
github_identifiers: github_identifiers
)
end
def github_identifiers(object)
raise NotImplementedError
end
end
end
end

View File

@ -24,7 +24,7 @@ module Gitlab
private
def collection
project.issues.select(:id, :description)
project.issues.select(:id, :description, :iid)
end
def ordering_column

View File

@ -24,7 +24,7 @@ module Gitlab
private
def collection
project.merge_requests.select(:id, :description)
project.merge_requests.select(:id, :description, :iid)
end
def ordering_column

View File

@ -24,7 +24,7 @@ module Gitlab
private
def collection
project.releases.select(:id, :description)
project.releases.select(:id, :description, :tag)
end
end
end

View File

@ -53,9 +53,18 @@ module Gitlab
:label
end
private
def model
Label
end
def github_identifiers(label)
{
title: label[:name],
object_type: object_type
}
end
end
end
end

View File

@ -57,9 +57,19 @@ module Gitlab
:milestone
end
private
def model
Milestone
end
def github_identifiers(milestone)
{
iid: milestone[:number],
title: milestone[:title],
object_type: object_type
}
end
end
end
end

View File

@ -6,7 +6,7 @@ module Gitlab
class NoteAttachmentsImporter
attr_reader :note_text, :project
# note_text - An instance of `NoteText`.
# note_text - An instance of `Gitlab::GithubImport::Representation::NoteText`.
# project - An instance of `Project`.
# client - An instance of `Gitlab::GithubImport::Client`.
def initialize(note_text, project, _client = nil)

View File

@ -18,6 +18,7 @@ module Gitlab
review_requests = client.pull_request_review_requests(repo, merge_request.iid)
review_requests[:merge_request_id] = merge_request.id
review_requests[:merge_request_iid] = merge_request.iid
yield review_requests
mark_merge_request_imported(merge_request)

View File

@ -55,6 +55,7 @@ module Gitlab
Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched)
review[:merge_request_id] = merge_request.id
review[:merge_request_iid] = merge_request.iid
yield(review)
mark_as_imported(review)

View File

@ -73,6 +73,13 @@ module Gitlab
def model
Release
end
def github_identifiers(release)
{
tag: release[:tag_name],
object_type: object_type
}
end
end
end
end

View File

@ -34,7 +34,10 @@ module Gitlab
end
def github_identifiers
{ id: id }
{
id: id,
login: login
}
end
end
end

View File

@ -79,7 +79,8 @@ module Gitlab
def github_identifiers
{
iid: iid,
issuable_type: issuable_type
issuable_type: issuable_type,
title: title
}
end
end

View File

@ -20,7 +20,11 @@ module Gitlab
end
def github_identifiers
{ id: id }
{
id: id,
iid: issuable_id,
event: event
}
end
def issuable_type

View File

@ -33,7 +33,8 @@ module Gitlab
def github_identifiers
{
oid: oid
oid: oid,
size: size
}
end
end

View File

@ -16,36 +16,36 @@ module Gitlab
attr_reader :attributes
expose_attribute :record_db_id, :record_type, :text
expose_attribute :record_db_id, :record_type, :text, :iid, :tag, :noteable_type
class << self
# Builds a note text representation from DB record of Note or Release.
#
# record - An instance of `Note`, `Release`, `Issue`, `MergeRequest` model
def from_db_record(record)
check_record_class!(record)
# Builds a note text representation from DB record of Note or Release.
#
# record - An instance of `Note`, `Release`, `Issue`, `MergeRequest` model
def self.from_db_record(record)
check_record_class!(record)
record_type = record.class.name
# only column for note is different along MODELS_ALLOWLIST
text = record.is_a?(::Note) ? record.note : record.description
new(
record_db_id: record.id,
record_type: record_type,
text: text
)
end
def from_json_hash(raw_hash)
new Representation.symbolize_hash(raw_hash)
end
private
def check_record_class!(record)
raise ModelNotSupported, record.class.name if MODELS_ALLOWLIST.exclude?(record.class)
end
record_type = record.class.name
# only column for note is different along MODELS_ALLOWLIST
text = record.is_a?(::Note) ? record.note : record.description
new(
record_db_id: record.id,
record_type: record_type,
text: text,
iid: record.try(:iid),
tag: record.try(:tag),
noteable_type: record.try(:noteable_type)
)
end
def self.from_json_hash(raw_hash)
new Representation.symbolize_hash(raw_hash)
end
def self.check_record_class!(record)
raise ModelNotSupported, record.class.name if MODELS_ALLOWLIST.exclude?(record.class)
end
private_class_method :check_record_class!
# attributes - A Hash containing the event details. The keys of this
# Hash (and any nested hashes) must be symbols.
def initialize(attributes)
@ -53,7 +53,22 @@ module Gitlab
end
def github_identifiers
{ db_id: record_db_id }
{
db_id: record_db_id
}.merge(record_type_specific_attribute)
end
private
def record_type_specific_attribute
case record_type
when ::Release.name
{ tag: tag }
when ::Issue.name, ::MergeRequest.name
{ noteable_iid: iid }
when ::Note.name
{ noteable_type: noteable_type }
end
end
end
end
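With the reworked from_db_record, each supported record class contributes one type-specific key on top of db_id. A sketch of the expected outputs, assuming unsaved records with these attributes (values illustrative):

release = Release.new(id: 42, tag: 'v1.0.0', description: 'notes')
note_text = Gitlab::GithubImport::Representation::NoteText.from_db_record(release)
note_text.github_identifiers
# => { db_id: 42, tag: 'v1.0.0' }

note = Note.new(id: 7, noteable_type: 'Issue', note: 'a comment')
Gitlab::GithubImport::Representation::NoteText
  .from_db_record(note)
  .github_identifiers
# => { db_id: 7, noteable_type: 'Issue' }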

View File

@ -111,7 +111,8 @@ module Gitlab
def github_identifiers
{
iid: iid,
issuable_type: issuable_type
issuable_type: issuable_type,
title: title
}
end
end

View File

@ -9,7 +9,7 @@ module Gitlab
attr_reader :attributes
expose_attribute :author, :note, :review_type, :submitted_at, :merge_request_id, :review_id
expose_attribute :author, :note, :review_type, :submitted_at, :merge_request_id, :merge_request_iid, :review_id
# Builds a PullRequestReview from a GitHub API response.
#
@ -19,6 +19,7 @@ module Gitlab
new(
merge_request_id: review[:merge_request_id],
merge_request_iid: review[:merge_request_iid],
author: user,
note: review[:body],
review_type: review[:state],
@ -49,8 +50,8 @@ module Gitlab
def github_identifiers
{
review_id: review_id,
merge_request_id: merge_request_id
merge_request_iid: merge_request_iid,
review_id: review_id
}
end
end

View File

@ -10,7 +10,7 @@ module Gitlab
attr_reader :attributes
expose_attribute :merge_request_id, :users
expose_attribute :merge_request_id, :merge_request_iid, :users
class << self
# Builds a list of requested reviewers from a GitHub API response.
@ -24,6 +24,7 @@ module Gitlab
new(
merge_request_id: review_requests[:merge_request_id],
merge_request_iid: review_requests[:merge_request_iid],
users: users
)
end
@ -37,7 +38,10 @@ module Gitlab
end
def github_identifiers
{ merge_request_id: merge_request_id }
{
merge_request_iid: merge_request_iid,
requested_reviewers: users.pluck(:login) # rubocop: disable CodeReuse/ActiveRecord
}
end
end
end

View File

@ -42,7 +42,7 @@ namespace :tw do
CodeOwnerRule.new('Distribution (Omnibus)', '@axil'),
CodeOwnerRule.new('Documentation Guidelines', '@sselhorn'),
CodeOwnerRule.new('Dynamic Analysis', '@rdickenson'),
CodeOwnerRule.new('Editor', '@ashrafkhamis'),
CodeOwnerRule.new('IDE', '@ashrafkhamis'),
CodeOwnerRule.new('Foundations', '@sselhorn'),
# CodeOwnerRule.new('Fulfillment Platform', ''),
CodeOwnerRule.new('Fuzz Testing', '@rdickenson'),

View File

@ -7,6 +7,7 @@ require "fileutils"
class UploadedFile
InvalidPathError = Class.new(StandardError)
UnknownSizeError = Class.new(StandardError)
ALLOWED_KWARGS = %i[filename content_type sha256 remote_id size upload_duration sha1 md5].freeze
# The filename, *not* including the path, of the "uploaded" file
attr_reader :original_filename
@ -17,12 +18,11 @@ class UploadedFile
# The content type of the "uploaded" file
attr_accessor :content_type
attr_reader :remote_id
attr_reader :sha256
attr_reader :size
attr_reader :upload_duration
attr_reader :remote_id, :sha256, :size, :upload_duration, :sha1, :md5
def initialize(path, **kwargs)
validate_kwargs(kwargs)
def initialize(path, filename: nil, content_type: "application/octet-stream", sha256: nil, remote_id: nil, size: nil, upload_duration: nil)
if path.present?
raise InvalidPathError, "#{path} file does not exist" unless ::File.exist?(path)
@ -30,23 +30,24 @@ class UploadedFile
@size = @tempfile.size
else
begin
@size = Integer(size)
@size = Integer(kwargs[:size])
rescue ArgumentError, TypeError
raise UnknownSizeError, 'Unable to determine file size'
end
end
begin
@upload_duration = Float(upload_duration)
@upload_duration = Float(kwargs[:upload_duration])
rescue ArgumentError, TypeError
@upload_duration = 0
end
@content_type = content_type
@original_filename = sanitize_filename(filename || path || '')
@content_type = content_type
@sha256 = sha256
@remote_id = remote_id
@content_type = kwargs[:content_type] || 'application/octet-stream'
@original_filename = sanitize_filename(kwargs[:filename] || path || '')
@sha256 = kwargs[:sha256]
@sha1 = kwargs[:sha1]
@md5 = kwargs[:md5]
@remote_id = kwargs[:remote_id]
end
def self.from_params(params, upload_paths)
@ -65,14 +66,16 @@ class UploadedFile
end
end
UploadedFile.new(
new(
file_path,
filename: params['name'],
content_type: params['type'] || 'application/octet-stream',
sha256: params['sha256'],
remote_id: remote_id,
size: params['size'],
upload_duration: params['upload_duration']
upload_duration: params['upload_duration'],
sha1: params['sha1'],
md5: params['md5']
).tap do |uploaded_file|
::Gitlab::Instrumentation::Uploads.track(uploaded_file)
end
@ -111,4 +114,11 @@ class UploadedFile
def respond_to?(method_name, include_private = false) #:nodoc:
@tempfile.respond_to?(method_name, include_private) || super
end
private
def validate_kwargs(kwargs)
invalid_kwargs = kwargs.keys - ALLOWED_KWARGS
raise ArgumentError, "unknown keyword(s): #{invalid_kwargs.join(', ')}" if invalid_kwargs.any?
end
end
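The constructor now takes **kwargs checked against an explicit allowlist, so adding metadata fields such as sha1 and md5 is a one-line change to ALLOWED_KWARGS, while misspelled keywords still fail fast. Illustrative behavior, assuming the file path exists:

file = UploadedFile.new('/tmp/example.txt', sha1: 'abc123', md5: 'def456')
file.sha1 # => "abc123"

UploadedFile.new('/tmp/example.txt', shaw1: 'abc123')
# raises ArgumentError: unknown keyword(s): shaw1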

View File

@ -1376,6 +1376,9 @@ msgstr ""
msgid "(removed)"
msgstr ""
msgid "(required)"
msgstr ""
msgid "(revoked)"
msgstr ""
@ -1388,9 +1391,6 @@ msgstr ""
msgid "* All times are in UTC unless specified"
msgstr ""
msgid "*Required"
msgstr ""
msgid "+ %{amount} more"
msgstr ""
@ -11117,6 +11117,9 @@ msgstr ""
msgid "Configure a %{codeStart}.gitlab-webide.yml%{codeEnd} file in the %{codeStart}.gitlab%{codeEnd} directory to start using the Web Terminal. %{helpStart}Learn more.%{helpEnd}"
msgstr ""
msgid "Configure advanced permissions"
msgstr ""
msgid "Configure advanced permissions, Large File Storage, two-factor authentication, and CI/CD settings."
msgstr ""
@ -12356,9 +12359,6 @@ msgstr ""
msgid "Create snippet"
msgstr ""
msgid "Create tag %{tagName}"
msgstr ""
msgid "Create testing scenarios by defining project conditions in your development platform."
msgstr ""
@ -12383,7 +12383,7 @@ msgstr ""
msgid "Create, update, or delete a merge request."
msgstr ""
msgid "CreateGitTag|Add a message to the tag. Leaving this blank creates a %{linkStart}lightweight tag%{linkEnd}."
msgid "CreateGitTag|Add a message to the tag. Leaving this blank creates a lightweight tag."
msgstr ""
msgid "CreateGitTag|Set tag message"
@ -29424,9 +29424,6 @@ msgstr ""
msgid "No suggestions found"
msgstr ""
msgid "No tag selected"
msgstr ""
msgid "No template"
msgstr ""
@ -31805,6 +31802,9 @@ msgstr ""
msgid "Permissions and group features"
msgstr ""
msgid "Permissions and project features"
msgstr ""
msgid "Personal Access Token"
msgstr ""
@ -33425,6 +33425,9 @@ msgstr ""
msgid "Proceed"
msgstr ""
msgid "Product Analytics"
msgstr ""
msgid "Product analytics"
msgstr ""
@ -33548,6 +33551,9 @@ msgstr ""
msgid "ProductAnalytics|Instrumentation details"
msgstr ""
msgid "ProductAnalytics|Loading instance"
msgstr ""
msgid "ProductAnalytics|Measure All tracked Events"
msgstr ""
@ -33587,6 +33593,9 @@ msgstr ""
msgid "ProductAnalytics|Set up product analytics"
msgstr ""
msgid "ProductAnalytics|Set up to track how your product is performing and optimize your product and development processes."
msgstr ""
msgid "ProductAnalytics|Steps to add product analytics as a CommonJS module"
msgstr ""
@ -33623,6 +33632,9 @@ msgstr ""
msgid "ProductAnalytics|Users"
msgstr ""
msgid "ProductAnalytics|Waiting for events"
msgstr ""
msgid "ProductAnalytics|What do you want to measure?"
msgstr ""
@ -36607,6 +36619,12 @@ msgstr ""
msgid "Release|Create a new release"
msgstr ""
msgid "Release|Create tag"
msgstr ""
msgid "Release|Create tag %{tag}"
msgstr ""
msgid "Release|Getting started with releases"
msgstr ""
@ -36619,12 +36637,21 @@ msgstr ""
msgid "Release|More information"
msgstr ""
msgid "Release|Or type a new tag name"
msgstr ""
msgid "Release|Release %{createdRelease} has been successfully created."
msgstr ""
msgid "Release|Releases are based on Git tags and mark specific points in a project's development history. They can contain information about the type of changes and can also deliver binaries, like compiled versions of your software."
msgstr ""
msgid "Release|Search or create tag name"
msgstr ""
msgid "Release|Select another tag"
msgstr ""
msgid "Release|Something went wrong while creating a new release."
msgstr ""
@ -38080,6 +38107,9 @@ msgstr ""
msgid "Runners|Existing runners are not affected. To permit runner registration for all groups, enable this setting in the Admin Area in Settings &gt; CI/CD."
msgstr ""
msgid "Runners|Existing runners are not affected. To permit runner registration for all projects, enable this setting in the Admin Area in Settings &gt; CI/CD."
msgstr ""
msgid "Runners|Fetch GitLab Runner release version data from GitLab.com"
msgstr ""
@ -38182,6 +38212,9 @@ msgstr ""
msgid "Runners|New instance runner"
msgstr ""
msgid "Runners|New project runners can be registered"
msgstr ""
msgid "Runners|New registration token generated!"
msgstr ""
@ -39074,9 +39107,6 @@ msgstr ""
msgid "Search milestones"
msgstr ""
msgid "Search or create tag"
msgstr ""
msgid "Search or filter commits"
msgstr ""
@ -40881,6 +40911,9 @@ msgstr ""
msgid "Set to 0 for no size limit."
msgstr ""
msgid "Set up"
msgstr ""
msgid "Set up CI/CD"
msgstr ""

View File

@ -186,4 +186,19 @@ RSpec.describe "Admin::Projects", feature_category: :projects do
end
end
end
describe 'project runner registration edit' do
it 'updates runner registration' do
visit edit_admin_namespace_project_path({ id: project.to_param, namespace_id: project.namespace.to_param })
expect(find_field('New project runners can be registered')).to be_checked
uncheck 'New project runners can be registered'
click_button 'Save changes'
visit edit_admin_namespace_project_path({ id: project.to_param, namespace_id: project.namespace.to_param })
expect(find_field('New project runners can be registered')).not_to be_checked
end
end
end

View File

@ -36,6 +36,7 @@ RSpec.describe 'User creates release', :js, feature_category: :continuous_delivery
it 'defaults the "Create from" dropdown to the project\'s default branch' do
select_new_tag_name(tag_name)
expect(page).to have_button(project.default_branch)
expect(page.find('[data-testid="create-from-field"] .ref-selector button')).to have_content(project.default_branch)
end
@ -123,13 +124,12 @@ RSpec.describe 'User creates release', :js, feature_category: :continuous_delivery
let(:new_page_url) { new_project_release_path(project, tag_name: 'v1.1.0') }
it 'creates release with preselected tag' do
page.within '[data-testid="tag-name-field"]' do
expect(page).to have_text('v1.1.0')
end
expect(page).to have_button 'v1.1.0'
open_tag_popover 'v1.1.0'
expect(page).not_to have_selector('[data-testid="create-from-field"]')
fill_release_title("test release")
click_button('Create release')
wait_for_all_requests

View File

@ -18,7 +18,7 @@ exports[`Blob Header Default Actions rendering matches the snapshot 1`] = `
</div>
<div
class="gl-display-flex gl-flex-wrap-wrap file-actions"
class="gl-display-flex gl-flex-wrap file-actions"
>
<viewer-switcher-stub
docicon="document"

Some files were not shown because too many files have changed in this diff