Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-04-03 21:08:25 +00:00
parent 7c8468c5ba
commit 21cf3e773d
79 changed files with 1093 additions and 419 deletions

View File

@ -229,7 +229,6 @@ Gitlab/StrongMemoizeAttr:
- 'app/services/packages/cleanup/update_policy_service.rb'
- 'app/services/packages/composer/create_package_service.rb'
- 'app/services/packages/debian/extract_changes_metadata_service.rb'
- 'app/services/packages/debian/find_or_create_package_service.rb'
- 'app/services/packages/debian/generate_distribution_key_service.rb'
- 'app/services/packages/debian/generate_distribution_service.rb'
- 'app/services/packages/debian/process_changes_service.rb'

View File

@ -806,8 +806,6 @@ Layout/EmptyLineAfterMagicComment:
- 'spec/services/packages/debian/extract_changes_metadata_service_spec.rb'
- 'spec/services/packages/debian/extract_deb_metadata_service_spec.rb'
- 'spec/services/packages/debian/parse_debian822_service_spec.rb'
- 'spec/services/packages/debian/process_changes_service_spec.rb'
- 'spec/services/packages/debian/process_package_file_service_spec.rb'
- 'spec/services/packages/helm/extract_file_metadata_service_spec.rb'
- 'spec/services/packages/helm/process_file_service_spec.rb'
- 'spec/services/packages/maven/create_package_service_spec.rb'

View File

@ -1542,7 +1542,6 @@ Layout/LineLength:
- 'ee/spec/controllers/operations_controller_spec.rb'
- 'ee/spec/controllers/projects/analytics/cycle_analytics/summary_controller_spec.rb'
- 'ee/spec/controllers/projects/audit_events_controller_spec.rb'
- 'ee/spec/controllers/projects/dependencies_controller_spec.rb'
- 'ee/spec/controllers/projects/integrations/jira/issues_controller_spec.rb'
- 'ee/spec/controllers/projects/integrations/zentao/issues_controller_spec.rb'
- 'ee/spec/controllers/projects/issues_controller_spec.rb'

View File

@ -2840,7 +2840,6 @@ RSpec/ContextWording:
- 'spec/services/packages/create_package_file_service_spec.rb'
- 'spec/services/packages/debian/create_distribution_service_spec.rb'
- 'spec/services/packages/debian/find_or_create_incoming_service_spec.rb'
- 'spec/services/packages/debian/process_changes_service_spec.rb'
- 'spec/services/packages/helm/process_file_service_spec.rb'
- 'spec/services/packages/maven/create_package_service_spec.rb'
- 'spec/services/packages/maven/find_or_create_package_service_spec.rb'

View File

@ -398,8 +398,6 @@ RSpec/ExpectChange:
- 'spec/services/milestones/find_or_create_service_spec.rb'
- 'spec/services/milestones/transfer_service_spec.rb'
- 'spec/services/namespaces/statistics_refresher_service_spec.rb'
- 'spec/services/packages/debian/process_changes_service_spec.rb'
- 'spec/services/packages/debian/process_package_file_service_spec.rb'
- 'spec/services/pages_domains/create_service_spec.rb'
- 'spec/services/post_receive_service_spec.rb'
- 'spec/services/projects/cleanup_service_spec.rb'

View File

@ -51,7 +51,6 @@ Style/RedundantFreeze:
- 'app/services/grafana/proxy_service.rb'
- 'app/services/import/validate_remote_git_endpoint_service.rb'
- 'app/services/issues/base_service.rb'
- 'app/services/packages/debian/process_package_file_service.rb'
- 'app/services/projects/import_error_filter.rb'
- 'app/services/projects/lfs_pointers/lfs_object_download_list_service.rb'
- 'app/services/prometheus/proxy_variable_substitution_service.rb'

View File

@ -1 +1 @@
763a199d4d1425815bdb7284356f2fe549edb1c3
d2ebb7f9a436d1476a2b750e844fe3525ae9dc8b

View File

@ -2,7 +2,7 @@ import { __, s__ } from '~/locale';
export const GENERIC_ERROR = __('Something went wrong on our end. Please try again!');
export const LOAD_SINGLE_DIFF_FAILED = s__(
'MergeRequest|Encountered an issue while trying to fetch the single file diff.',
"MergeRequest|Can't fetch the diff needed to update this view. Please reload this page.",
);
export const DISCUSSION_SINGLE_DIFF_FAILED = s__(
"MergeRequest|Can't fetch the single file diff for the discussion. Please reload this page.",

View File

@ -0,0 +1,22 @@
<script>
import { s__ } from '~/locale';
export default {
props: {
project: {
type: Object,
required: false,
default: () => ({}),
},
},
i18n: {
pageTitle: s__('Import|GitHub import details'),
},
};
</script>
<template>
<div>
<h1>{{ $options.i18n.pageTitle }}</h1>
</div>
</template>

View File

@ -0,0 +1,18 @@
import Vue from 'vue';
import ImportDetailsApp from './components/import_details_app.vue';
export default () => {
const el = document.querySelector('.js-import-details');
if (!el) {
return null;
}
return new Vue({
el,
name: 'ImportDetailsRoot',
render(createElement) {
return createElement(ImportDetailsApp);
},
});
};

View File

@ -170,6 +170,7 @@ function createApolloClient(resolvers = {}, config = {}) {
config: {
url: httpResponse.url,
operationName: operation.operationName,
method: operation.getContext()?.fetchOptions?.method || 'POST', // If method is not explicitly set, we default to a POST request
},
headers: {
'x-request-id': httpResponse.headers.get('x-request-id'),

View File

@ -0,0 +1,3 @@
import initImportDetails from '~/import/details';
initImportDetails();

View File

@ -1,4 +1,5 @@
<script>
import { GlLink } from '@gitlab/ui';
import SafeHtml from '~/vue_shared/directives/safe_html';
import { glEmojiTag } from '~/emoji';
import { mergeUrlParams } from '~/lib/utils/url_utility';
@ -12,6 +13,7 @@ export default {
components: {
AddRequest,
DetailedMetric,
GlLink,
RequestSelector,
},
directives: {
@ -30,6 +32,10 @@ export default {
type: String,
required: true,
},
requestMethod: {
type: String,
required: true,
},
peekUrl: {
type: String,
required: true,
@ -108,9 +114,6 @@ export default {
this.currentRequestId = requestId;
},
},
initialRequest() {
return this.currentRequestId === this.requestId;
},
hasHost() {
return this.currentRequest && this.currentRequest.details && this.currentRequest.details.host;
},
@ -135,6 +138,9 @@ export default {
showZoekt() {
return document.body.dataset.page === 'search:show';
},
showFlamegraphButtons() {
return this.currentRequest.details && this.isGetRequest(this.currentRequestId);
},
},
created() {
if (!this.showZoekt) {
@ -151,12 +157,15 @@ export default {
this.currentRequest = newRequestId;
this.$emit('change-request', newRequestId);
},
flamegraphPath(mode) {
flamegraphPath(mode, requestId) {
return mergeUrlParams(
{ performance_bar: 'flamegraph', stackprof_mode: mode },
window.location.href,
this.store.findRequest(requestId).fullUrl,
);
},
isGetRequest(requestId) {
return this.store.findRequest(requestId)?.method?.toUpperCase() === 'GET';
},
},
safeHtmlConfig: { ADD_TAGS: ['gl-emoji'] },
};
@ -192,41 +201,41 @@ export default {
id="peek-view-trace"
class="view"
>
<a class="gl-text-blue-200" :href="currentRequest.details.tracing.tracing_url">{{
<gl-link class="gl-text-blue-200" :href="currentRequest.details.tracing.tracing_url">{{
s__('PerformanceBar|Trace')
}}</a>
}}</gl-link>
</div>
<div v-if="currentRequest.details" id="peek-download" class="view">
<a class="gl-text-blue-200" :download="downloadName" :href="downloadPath">{{
<gl-link class="gl-text-blue-200" :download="downloadName" :href="downloadPath">{{
s__('PerformanceBar|Download')
}}</a>
}}</gl-link>
</div>
<div
v-if="currentRequest.details && env === 'development'"
id="peek-memory-report"
class="view"
>
<a class="gl-text-blue-200" :href="memoryReportPath">{{
<gl-link class="gl-text-blue-200" :href="memoryReportPath">{{
s__('PerformanceBar|Memory report')
}}</a>
}}</gl-link>
</div>
<div v-if="currentRequest.details" id="peek-flamegraph" class="view">
<div v-if="showFlamegraphButtons" id="peek-flamegraph" class="view">
<span class="gl-text-white-200">{{ s__('PerformanceBar|Flamegraph with mode:') }}</span>
<a class="gl-text-blue-200" :href="flamegraphPath('wall')">{{
<gl-link class="gl-text-blue-200" :href="flamegraphPath('wall', currentRequestId)">{{
s__('PerformanceBar|wall')
}}</a>
}}</gl-link>
/
<a class="gl-text-blue-200" :href="flamegraphPath('cpu')">{{
<gl-link class="gl-text-blue-200" :href="flamegraphPath('cpu', currentRequestId)">{{
s__('PerformanceBar|cpu')
}}</a>
}}</gl-link>
/
<a class="gl-text-blue-200" :href="flamegraphPath('object')">{{
<gl-link class="gl-text-blue-200" :href="flamegraphPath('object', currentRequestId)">{{
s__('PerformanceBar|object')
}}</a>
}}</gl-link>
</div>
<a v-if="statsUrl" class="gl-text-blue-200 view" :href="statsUrl">{{
<gl-link v-if="statsUrl" class="gl-text-blue-200 view" :href="statsUrl">{{
s__('PerformanceBar|Stats')
}}</a>
}}</gl-link>
<request-selector
v-if="currentRequest"
:current-request="currentRequest"

View File

@ -32,6 +32,7 @@ const initPerformanceBar = (el) => {
store,
env: performanceBarData.env,
requestId: performanceBarData.requestId,
requestMethod: performanceBarData.requestMethod,
peekUrl: performanceBarData.peekUrl,
profileUrl: performanceBarData.profileUrl,
statsUrl: performanceBarData.statsUrl,
@ -40,7 +41,13 @@ const initPerformanceBar = (el) => {
mounted() {
PerformanceBarService.registerInterceptor(this.peekUrl, this.addRequest);
this.addRequest(this.requestId, window.location.href);
this.addRequest(
this.requestId,
window.location.href,
undefined,
undefined,
this.requestMethod,
);
this.loadRequestDetails(this.requestId);
},
beforeDestroy() {
@ -56,12 +63,12 @@ const initPerformanceBar = (el) => {
this.addRequest(urlOrRequestId, urlOrRequestId);
}
},
addRequest(requestId, requestUrl, operationName) {
addRequest(requestId, requestUrl, operationName, requestParams, methodVerb) {
if (!this.store.canTrackRequest(requestUrl)) {
return;
}
this.store.addRequest(requestId, requestUrl, operationName);
this.store.addRequest(requestId, requestUrl, operationName, requestParams, methodVerb);
},
loadRequestDetails(requestId) {
const request = this.store.findRequest(requestId);
@ -145,6 +152,7 @@ const initPerformanceBar = (el) => {
store: this.store,
env: this.env,
requestId: this.requestId,
requestMethod: this.requestMethod,
peekUrl: this.peekUrl,
profileUrl: this.profileUrl,
statsUrl: this.statsUrl,

View File

@ -14,11 +14,13 @@ export default class PerformanceBarService {
fireCallback,
requestId,
requestUrl,
requestParams,
operationName,
methodVerb,
] = PerformanceBarService.callbackParams(response, peekUrl);
if (fireCallback) {
callback(requestId, requestUrl, operationName);
callback(requestId, requestUrl, operationName, requestParams, methodVerb);
}
return response;
@ -35,11 +37,14 @@ export default class PerformanceBarService {
static callbackParams(response, peekUrl) {
const requestId = response.headers && response.headers['x-request-id'];
const requestUrl = response.config?.url;
const requestParams = response.config?.params;
const methodVerb = response.config?.method;
const cachedResponse =
response.headers && parseBoolean(response.headers['x-gitlab-from-cache']);
const fireCallback = requestUrl !== peekUrl && Boolean(requestId) && !cachedResponse;
const operationName = response.config?.operationName;
return [fireCallback, requestId, requestUrl, operationName];
return [fireCallback, requestId, requestUrl, requestParams, operationName, methodVerb];
}
}

View File

@ -1,11 +1,19 @@
import { mergeUrlParams } from '~/lib/utils/url_utility';
export default class PerformanceBarStore {
constructor() {
this.requests = [];
}
addRequest(requestId, requestUrl, operationName) {
addRequest(requestId, requestUrl, operationName, requestParams, methodVerb) {
if (!this.findRequest(requestId)) {
let displayName = PerformanceBarStore.truncateUrl(requestUrl);
let displayName = '';
if (methodVerb) {
displayName += `${methodVerb.toUpperCase()} `;
}
displayName += PerformanceBarStore.truncateUrl(requestUrl);
if (operationName) {
displayName += ` (${operationName})`;
@ -14,6 +22,8 @@ export default class PerformanceBarStore {
this.requests.push({
id: requestId,
url: requestUrl,
fullUrl: mergeUrlParams(requestParams, requestUrl),
method: methodVerb,
details: {},
displayName,
});

View File

@ -32,30 +32,12 @@
}
@media (min-width: map-get($grid-breakpoints, md)) {
// The `+11` is to ensure the file header border shows when scrolled -
// the bottom of the compare-versions header and the top of the file header
--initial-top: calc(#{$header-height} + #{$mr-tabs-height});
--top: var(--initial-top);
position: -webkit-sticky;
position: sticky;
top: var(--top);
top: calc(#{$calc-application-header-height} + #{$mr-tabs-height});
z-index: 120;
&.is-sidebar-moved {
--initial-top: calc(#{$header-height} + #{$mr-tabs-height + 24px});
}
.with-system-header & {
--top: calc(var(--initial-top) + #{$system-header-height});
}
.with-system-header.with-performance-bar & {
--top: calc(var(--initial-top) + #{$system-header-height} + #{$performance-bar-height});
}
.with-performance-bar & {
top: calc(var(--initial-top) + #{$performance-bar-height});
top: calc(#{$calc-application-header-height} + #{$mr-tabs-height} + 24px);
}
&::before {
@ -70,19 +52,11 @@
}
&.is-commit {
top: calc(#{$header-height} + #{$commit-stat-summary-height});
.with-performance-bar & {
top: calc(#{$header-height} + #{$commit-stat-summary-height} + #{$performance-bar-height});
}
top: calc(#{$calc-application-header-height} + #{$commit-stat-summary-height});
}
&.is-compare {
top: calc(#{$header-height} + #{$compare-branches-sticky-header-height});
.with-performance-bar & {
top: calc(#{$performance-bar-height} + #{$header-height} + #{$compare-branches-sticky-header-height});
}
top: calc(#{$calc-application-header-height} + #{$compare-branches-sticky-header-height});
}
}
@ -99,22 +73,7 @@
@media (min-width: map-get($grid-breakpoints, md)) {
&.conflict .file-title,
&.conflict .file-title-flex-parent {
top: $header-height;
}
.with-performance-bar &.conflict .file-title,
.with-performance-bar &.conflict .file-title-flex-parent {
top: calc(#{$header-height} + #{$performance-bar-height});
}
.with-system-header &.conflict .file-title,
.with-system-header &.conflict .file-title-flex-parent {
top: calc(#{$header-height} + #{$system-header-height});
}
.with-system-header.with-performance-bar &.conflict .file-title,
.with-system-header.with-performance-bar &.conflict .file-title-flex-parent {
top: calc(#{$header-height} + #{$performance-bar-height} + #{$system-header-height});
top: $calc-application-header-height;
}
}
@ -733,13 +692,9 @@ table.code {
@include media-breakpoint-up(sm) {
@include gl-sticky;
top: $header-height;
top: $calc-application-header-height;
z-index: 200;
.with-performance-bar & {
top: calc(#{$header-height} + #{$performance-bar-height});
}
&.is-stuck {
@include gl-py-0;
border-top: 1px solid $white-dark;

View File

@ -288,16 +288,13 @@
@mixin right-sidebar {
position: fixed;
top: $header-height;
// Default value for CSS var must contain a unit
// stylelint-disable-next-line length-zero-no-unit
bottom: var(--review-bar-height, 0px);
top: $calc-application-header-height;
bottom: calc(#{$calc-application-footer-height} + var(--mr-review-bar-height));
right: 0;
transition: width $gl-transition-duration-medium;
background-color: $white;
z-index: 200;
overflow: hidden;
}
.right-sidebar {
@ -469,28 +466,14 @@
padding: 0;
.issuable-context-form {
--initial-top: calc(#{$header-height} + 76px);
--top: var(--initial-top);
$issue-sticky-header-height: 76px;
@include gl-sticky;
@include gl-overflow-auto;
top: var(--top);
height: calc(100vh - var(--top));
top: calc(#{$calc-application-header-height} + #{$issue-sticky-header-height});
height: calc(#{$calc-application-viewport-height} - #{$issue-sticky-header-height} - var(--mr-review-bar-height));
position: sticky;
overflow: auto;
padding: 0 15px;
margin-bottom: calc(var(--top) * -1);
.with-performance-bar & {
--top: calc(var(--initial-top) + #{$performance-bar-height});
}
.with-system-header & {
--top: calc(var(--initial-top) + #{$system-header-height});
}
.with-performance-bar.with-system-header & {
--top: calc(var(--initial-top) + #{$system-header-height} + #{$performance-bar-height});
}
margin-bottom: calc((#{$header-height} + $issue-sticky-header-height) * -1);
}
}
}
@ -742,10 +725,6 @@
}
}
.with-performance-bar .right-sidebar {
top: calc(#{$header-height} + #{$performance-bar-height});
}
.issuable-show-labels {
.gl-label {
margin-bottom: 5px;

View File

@ -46,9 +46,7 @@
}
// left sidebar eg: project page
// right sidebar eg: MR page
.nav-sidebar,
.right-sidebar {
.nav-sidebar {
top: calc(#{$system-header-height} + #{$header-height});
}
@ -62,9 +60,7 @@
}
// left sidebar eg: project page
// right sidebar eg: MR page
.nav-sidebar,
.right-sidebar {
.nav-sidebar {
top: calc(#{$header-height} + #{$performance-bar-height} + #{$system-header-height});
}
}
@ -73,10 +69,7 @@
// System Footer
.with-system-footer {
// left sidebar eg: project page
// right sidebar eg: mr page
.nav-sidebar,
.right-sidebar,
.review-bar-component,
// navless pages' footer eg: login page
// navless pages' footer border eg: login page
&.devise-layout-html body .footer-container,

View File

@ -255,21 +255,19 @@ $tabs-holder-z-index: 250;
// If they don't match, the file tree and the diff files stick
// to the top at different heights, which is a bad-looking defect
$diff-file-header-top: 11px;
--initial-pos: calc(#{$header-height} + #{$mr-tabs-height} + #{$diff-file-header-top});
--top-pos: var(--initial-pos);
position: -webkit-sticky;
position: sticky;
top: calc(var(--top-pos) + var(--performance-bar-height, 0px));
top: calc(#{$calc-application-header-height} + #{$mr-tabs-height} + #{$diff-file-header-top});
min-height: 300px;
height: calc(100vh - var(--top-pos) - var(--system-header-height, 0px) - var(--performance-bar-height, 0px) - var(--mr-review-bar-height, 0px));
height: calc(#{$calc-application-viewport-height} - (#{$mr-tabs-height} + #{$diff-file-header-top}));
.drag-handle {
bottom: 16px;
}
&.is-sidebar-moved {
--top-pos: calc(var(--initial-pos) + 26px);
height: calc(#{$calc-application-viewport-height} - (#{$mr-tabs-height} + #{$diff-file-header-top} + 26px));
top: calc(#{$calc-application-header-height} + #{$mr-tabs-height} + #{$diff-file-header-top} + 26px);
}
}
@ -1138,7 +1136,7 @@ $tabs-holder-z-index: 250;
.review-bar-component {
position: fixed;
bottom: 0;
bottom: $calc-application-footer-height;
left: 0;
z-index: $zindex-dropdown-menu;
display: flex;

View File

@ -252,7 +252,7 @@ ul.related-merge-requests > li gl-emoji {
@include gl-left-0;
width: var(--width);
top: $header-height;
top: $calc-application-header-height;
// collapsed right sidebar
@include media-breakpoint-up(sm) {
@ -266,10 +266,6 @@ ul.related-merge-requests > li gl-emoji {
}
}
.with-performance-bar .issue-sticky-header {
top: calc(#{$header-height} + #{$performance-bar-height});
}
@include media-breakpoint-up(md) {
// collapsed left sidebar + collapsed right sidebar
.page-with-contextual-sidebar .issue-sticky-header {

View File

@ -209,19 +209,11 @@ $comparison-empty-state-height: 62px;
}
.merge-request-tabs-holder {
top: $header-height;
top: $calc-application-header-height;
z-index: $tabs-holder-z-index;
background-color: $body-bg;
border-bottom: 1px solid $border-color;
.with-system-header & {
top: calc(#{$header-height} + #{$system-header-height});
}
.with-system-header.with-performance-bar & {
top: calc(#{$header-height} + #{$system-header-height} + #{$performance-bar-height});
}
@include media-breakpoint-up(md) {
position: sticky;
}
@ -240,12 +232,6 @@ $comparison-empty-state-height: 62px;
}
}
.with-performance-bar {
.merge-request-tabs-holder {
top: calc(#{$header-height} + #{$performance-bar-height});
}
}
.limit-container-width {
.merge-request-tabs-container {
max-width: $limited-layout-width;
@ -336,11 +322,7 @@ $comparison-empty-state-height: 62px;
.mr-compare {
.diff-file .file-title-flex-parent {
top: calc(#{$header-height} + #{$mr-tabs-height});
.with-performance-bar & {
top: calc(#{$performance-bar-height} + #{$header-height} + #{$mr-tabs-height});
}
top: calc(#{$calc-application-header-height} + #{$mr-tabs-height});
}
}

View File

@ -500,18 +500,6 @@ $system-note-icon-m-left: $avatar-m-left + $icon-size-diff / $avatar-m-ratio;
border-radius: 0;
margin-left: 2.5rem;
@media (min-width: map-get($grid-breakpoints, md)) {
--initial-top: calc(#{$header-height} + #{$mr-tabs-height});
&.is-sidebar-moved {
--initial-top: calc(#{$header-height} + #{$mr-tabs-height + 24px});
}
.with-performance-bar & {
--top: 123px;
}
}
&:hover {
background-color: $gray-light;
}

View File

@ -623,6 +623,7 @@ html {
--performance-bar-height: 0px;
--system-header-height: 0px;
--system-footer-height: 0px;
--mr-review-bar-height: 0px;
}
.gl-font-sm {
font-size: 12px;

View File

@ -623,6 +623,7 @@ html {
--performance-bar-height: 0px;
--system-header-height: 0px;
--system-footer-height: 0px;
--mr-review-bar-height: 0px;
}
.gl-font-sm {
font-size: 12px;

View File

@ -668,6 +668,12 @@ body.navless {
.btn-block.btn {
padding: 6px 0;
}
:root {
--performance-bar-height: 0px;
--system-header-height: 0px;
--system-footer-height: 0px;
--mr-review-bar-height: 0px;
}
.tab-content {
overflow: visible;
}

View File

@ -67,6 +67,10 @@ class Import::GithubController < Import::BaseController
end
end
def details
render_404 unless Feature.enabled?(:import_details_page)
end
def create
result = Import::GithubService.new(client, current_user, import_params).execute(access_params, provider_name)

View File

@ -15,7 +15,6 @@ module Resolvers
return unless project.forked?
return unless authorized_fork_source?
return unless project.repository.branch_exists?(args[:ref])
return unless Feature.enabled?(:fork_divergence_counts, project)
::Projects::Forks::Details.new(project, args[:ref])
end

View File

@ -11,6 +11,10 @@ module Admin
inactive_projects_send_warning_email_after_months: settings.inactive_projects_send_warning_email_after_months
}
end
def project_missing_pipeline_yaml?(project)
project.repository&.gitlab_ci_yml.blank?
end
end
end
end

View File

@ -72,9 +72,8 @@ class Packages::Package < ApplicationRecord
scope: %i[project_id version package_type],
conditions: -> { not_pending_destruction }
},
unless: -> { pending_destruction? || conan? || debian_package? }
unless: -> { pending_destruction? || conan? }
validate :unique_debian_package_name, if: :debian_package?
validate :valid_conan_package_recipe, if: :conan?
validate :valid_composer_global_name, if: :composer?
validate :npm_package_already_taken, if: :npm?
@ -224,6 +223,12 @@ class Packages::Package < ApplicationRecord
find_by!(name: name, version: version)
end
def self.existing_debian_packages_with(name:, version:)
debian.with_name(name)
.with_version(version)
.not_pending_destruction
end
def self.pluck_names
pluck(:name)
end
@ -418,19 +423,6 @@ class Packages::Package < ApplicationRecord
project.root_namespace.path == ::Packages::Npm.scope_of(name)
end
def unique_debian_package_name
return unless debian_publication&.distribution
package_exists = debian_publication.distribution.packages
.with_name(name)
.with_version(version)
.not_pending_destruction
.id_not_in(id)
.exists?
errors.add(:base, _('Debian package already exists in Distribution')) if package_exists
end
def forbidden_debian_changes
return unless persisted?

View File

@ -6,13 +6,19 @@ module Packages
include Gitlab::Utils::StrongMemoize
def execute
package = project.packages
.debian
.with_name(params[:name])
.with_version(params[:version])
.with_debian_codename_or_suite(params[:distribution_name])
.not_pending_destruction
.first
packages = project.packages
.existing_debian_packages_with(name: params[:name], version: params[:version])
package = packages.with_debian_codename_or_suite(params[:distribution_name]).first
unless package
package_in_other_distribution = packages.first
if package_in_other_distribution
raise ArgumentError, "Debian package #{params[:name]} #{params[:version]} exists " \
"in distribution #{package_in_other_distribution.debian_distribution.codename}"
end
end
package ||= create_package!(
:debian,
@ -25,13 +31,12 @@ module Packages
private
def distribution
strong_memoize(:distribution) do
Packages::Debian::DistributionsFinder.new(
project,
codename_or_suite: params[:distribution_name]
).execute.last!
end
Packages::Debian::DistributionsFinder.new(
project,
codename_or_suite: params[:distribution_name]
).execute.last!
end
strong_memoize_attr :distribution
end
end
end

View File

@ -6,7 +6,7 @@ module Packages
include ExclusiveLeaseGuard
include Gitlab::Utils::StrongMemoize
SOURCE_FIELD_SPLIT_REGEX = /[ ()]/.freeze
SOURCE_FIELD_SPLIT_REGEX = /[ ()]/
# used by ExclusiveLeaseGuard
DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
@ -54,14 +54,21 @@ module Packages
strong_memoize_attr :file_metadata
def package
package = temp_package.project
.packages
.debian
.with_name(package_name)
.with_version(package_version)
.with_debian_codename_or_suite(@distribution_name)
.not_pending_destruction
.last
packages = temp_package.project
.packages
.existing_debian_packages_with(name: package_name, version: package_version)
package = packages.with_debian_codename_or_suite(@distribution_name)
.first
unless package
package_in_other_distribution = packages.first
if package_in_other_distribution
raise ArgumentError, "Debian package #{package_name} #{package_version} exists " \
"in distribution #{package_in_other_distribution.debian_distribution.codename}"
end
end
package || temp_package
end
strong_memoize_attr :package

View File

@ -0,0 +1,4 @@
- add_to_breadcrumbs _('Create a new project'), new_project_path
- page_title s_('Import|GitHub import details')
.js-import-details

View File

@ -3,5 +3,6 @@
#js-peek{ data: { env: Peek.env,
request_id: peek_request_id,
stats_url: ENV.fetch('GITLAB_PERFORMANCE_BAR_STATS_URL', ''),
peek_url: "#{peek_routes_path}/results" },
peek_url: "#{peek_routes_path}/results",
request_method: request.method, },
class: Peek.env }

View File

@ -15,8 +15,7 @@
.nav-block.gl-display-flex.gl-xs-flex-direction-column.gl-align-items-stretch
= render 'projects/tree/tree_header', tree: @tree, is_project_overview: is_project_overview
- if project.forked? && Feature.enabled?(:fork_divergence_counts, @project.fork_source)
- if project.forked?
#js-fork-info{ data: vue_fork_divergence_data(project, ref) }
- if is_project_overview

View File

@ -55,15 +55,6 @@
%button.btn.gl-button.btn-blank.btn-link.js-read-more-trigger.d-lg-none{ type: "button" }
= _("Read more")
- if @project.forked? && Feature.disabled?(:fork_divergence_counts, @project.fork_source)
%p
- source = visible_fork_source(@project)
- if source
#{ s_('ForkedFromProjectPath|Forked from') }
= link_to source.full_name, project_path(source), data: { qa_selector: 'forked_from_link' }
- else
= s_('ForkedFromProjectPath|Forked from an inaccessible project.')
= render_if_exists "projects/home_mirror"
- if @project.badges.present?

View File

@ -13,8 +13,7 @@
#js-code-owners{ data: { blob_path: blob.path, project_path: @project.full_path, branch: @ref } }
= render "projects/blob/auxiliary_viewer", blob: blob
- if project.forked? && Feature.enabled?(:fork_divergence_counts, @project.fork_source)
- if project.forked?
#js-fork-info{ data: vue_fork_divergence_data(project, ref) }
#blob-content-holder.blob-content-holder.js-per-page{ data: { blame_per_page: Projects::BlameService::PER_PAGE } }

View File

@ -10,6 +10,6 @@
%p.form-text.text-muted
= s_('ProjectSettings|Leave empty to use default template.')
= sprintf(s_('ProjectSettings|Maximum %{maxLength} characters.'), { maxLength: Issue::MAX_BRANCH_TEMPLATE })
- branch_name_help_link = help_page_path('user/project/merge_requests/creating_merge_requests.md', anchor: 'from-an-issue')
- branch_name_help_link = help_page_path('user/project/repository/branches/index.md', anchor: 'name-your-branch')
= link_to _('What variables can I use?'), branch_name_help_link, target: "_blank"
= render_if_exists 'projects/branch_defaults/branch_names_help'

View File

@ -1,8 +1,8 @@
---
name: fork_divergence_counts
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/103814
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/382878
milestone: '15.7'
name: import_details_page
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/116090
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/397650
milestone: '15.11'
type: development
group: group::source code
group: group::import
default_enabled: false

View File

@ -1,40 +1,5 @@
# frozen_string_literal: true
require 'gettext_i18n_rails_js/parser/javascript'
require 'json'
module GettextI18nRailsJs
module Parser
module Javascript
# This is required to tell the `rake gettext:find` script to use the Javascript
# parser for *.vue files.
#
# Overwrites: https://github.com/webhippie/gettext_i18n_rails_js/blob/46c58db6d2053a4f5f36a0eb024ea706ff5707cb/lib/gettext_i18n_rails_js/parser/javascript.rb#L36
def target?(file)
[
".js",
".jsx",
".vue"
].include? ::File.extname(file)
end
def collect_for(file)
gettext_messages_by_file[file] || []
end
private
def gettext_messages_by_file
@gettext_messages_by_file ||= Gitlab::Json.parse(load_messages)
end
def load_messages
`node scripts/frontend/extract_gettext_all.js --all`
end
end
end
end
class PoToJson
# This is required to modify the JS locale file output to our import needs
# Overwrites: https://github.com/webhippie/po_to_json/blob/master/lib/po_to_json.rb#L46

View File

@ -19,6 +19,7 @@ namespace :import do
resource :github, only: [:create, :new], controller: :github do
post :personal_access_token
get :status
get :details
get :callback
get :realtime_changes
post :cancel

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class AddTmpUniquePackagesIndexWhenDebian < Gitlab::Database::Migration[2.1]
TABLE_NAME = :packages_packages
PACKAGE_TYPE_DEBIAN = 9
PACKAGE_STATUS_PENDING_DESTRUCTION = 4
TMP_DEBIAN_UNIQUE_INDEX_NAME = 'tmp_unique_packages_project_id_and_name_and_version_when_debian'
disable_ddl_transaction!
def up
# This index will disallow further duplicates while we're deduplicating the data.
add_concurrent_index TABLE_NAME, [:project_id, :name, :version],
where: "package_type = #{PACKAGE_TYPE_DEBIAN} AND status != #{PACKAGE_STATUS_PENDING_DESTRUCTION} AND
created_at > TIMESTAMP WITH TIME ZONE '#{Time.now.utc}'",
unique: true,
name: TMP_DEBIAN_UNIQUE_INDEX_NAME
end
def down
remove_concurrent_index_by_name TABLE_NAME, TMP_DEBIAN_UNIQUE_INDEX_NAME
end
end

View File

@ -0,0 +1,48 @@
# frozen_string_literal: true
class EnsureUniqueDebianPackages < Gitlab::Database::Migration[2.1]
BATCH_SIZE = 1_000
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main
class Package < MigrationRecord
include EachBatch
self.table_name = 'packages_packages'
enum package_type: { debian: 9 }
enum status: { pending_destruction: 4 }
end
def up
Package.distinct_each_batch(column: :project_id) do |package_projects|
project_ids = package_projects.pluck(:project_id)
duplicates = Package.debian
.not_pending_destruction
.where(project_id: project_ids)
.select('project_id, name, version, MAX(id) as last_id')
.group(:project_id, :name, :version)
.having('count(id) > 1')
loop do
duplicates.limit(BATCH_SIZE).each do |duplicate|
Package.debian
.not_pending_destruction
.where(
project_id: duplicate.project_id,
name: duplicate.name,
version: duplicate.version,
id: ..duplicate.last_id - 1
).update_all status: :pending_destruction
end
break unless duplicates.exists?
end
end
end
def down
# nothing to do
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
class AddUniquePackagesIndexWhenDebian < Gitlab::Database::Migration[2.1]
TABLE_NAME = :packages_packages
PACKAGE_TYPE_DEBIAN = 9
PACKAGE_STATUS_PENDING_DESTRUCTION = 4
TMP_DEBIAN_UNIQUE_INDEX_NAME = 'tmp_unique_packages_project_id_and_name_and_version_when_debian'
DEBIAN_UNIQUE_INDEX_NAME = 'unique_packages_project_id_and_name_and_version_when_debian'
disable_ddl_transaction!
def up
add_concurrent_index TABLE_NAME, [:project_id, :name, :version],
where: "package_type = #{PACKAGE_TYPE_DEBIAN} AND status != #{PACKAGE_STATUS_PENDING_DESTRUCTION}",
unique: true,
name: DEBIAN_UNIQUE_INDEX_NAME
remove_concurrent_index_by_name TABLE_NAME, TMP_DEBIAN_UNIQUE_INDEX_NAME
end
def down
# This index will disallow further duplicates while we're deduplicating the data.
add_concurrent_index TABLE_NAME, [:project_id, :name, :version],
where: "package_type = #{PACKAGE_TYPE_DEBIAN} AND status != #{PACKAGE_STATUS_PENDING_DESTRUCTION} AND
created_at > TIMESTAMP WITH TIME ZONE '#{Time.now.utc}'",
unique: true,
name: TMP_DEBIAN_UNIQUE_INDEX_NAME
remove_concurrent_index_by_name TABLE_NAME, DEBIAN_UNIQUE_INDEX_NAME
end
end

View File

@ -0,0 +1 @@
5b9e269f3354b1f054a3e3d8f9a011ea2c1a19c72a4cecdfb8a3e6e80ab83ab3

View File

@ -0,0 +1 @@
a33f941480ce270532dca961cf72ce31c5c2d732b1c1ab361c95bac6e278a443

View File

@ -0,0 +1 @@
238730785306bb43677be1e86ae3cca210d651eb5397a7fbf90a12a7ec252614

View File

@ -32838,6 +32838,8 @@ CREATE UNIQUE INDEX unique_index_for_project_pages_unique_domain ON project_sett
CREATE UNIQUE INDEX unique_merge_request_metrics_by_merge_request_id ON merge_request_metrics USING btree (merge_request_id);
CREATE UNIQUE INDEX unique_packages_project_id_and_name_and_version_when_debian ON packages_packages USING btree (project_id, name, version) WHERE ((package_type = 9) AND (status <> 4));
CREATE UNIQUE INDEX unique_postgres_async_fk_validations_name_and_table_name ON postgres_async_foreign_key_validations USING btree (name, table_name);
CREATE UNIQUE INDEX unique_projects_on_name_namespace_id ON projects USING btree (name, namespace_id);

View File

@ -265,8 +265,5 @@ To upgrade to a later version [using your own web-server](#self-host-the-product
If you self-host the product documentation:
- The version dropdown list displays additional versions that don't exist. Selecting
these versions displays a `404 Not Found` page.
- The search displays results from `docs.gitlab.com` and not the local site.
- By default, the landing page redirects to the
respective version (for example, `/14.5/`). This causes the landing page <https://docs.gitlab.com> to not be displayed.

View File

@ -7,15 +7,56 @@ type: concepts
# Security report ingestion overview
## Definitions
WARNING:
The `Vulnerability::Feedback` model is currently undergoing deprecation and should be actively avoided in all further development. It is currently maintained with feature parity to enable a revert should any issues arise, but is intended to be removed in 16.0. Any interactions relating to the Feedback model are superseded by the `StateTransition`, `IssueLink`, and `MergeRequestLink` models. You can find out more in [this epic](https://gitlab.com/groups/gitlab-org/-/epics/5629).
- **Vulnerability Finding** an instance of `Vulnerabilities::Finding` class. This class was previously called `Vulnerabilities::Occurrence`; after renaming the class, we kept the associated table name `vulnerability_occurrences` due to the effort involved in renaming large tables.
- **Vulnerability** an instance of `Vulnerability` class. They are created based on information available in `Vulnerabilities::Finding` class. Every `Vulnerability` **must have** a corresponding `Vulnerabilities::Finding` object to be valid, however this is not enforced at the database level.
- **Security Finding** an instance of `Security::Finding` class. They store **partial** finding data to improve performance of the pipeline security report. We are working on extending this class to store almost all required information so we can stop relying on job artifacts.
- **Feedback** an instance of `Vulnerabilities::Feedback` class. They are created to keep track of users' interactions with Vulnerability Findings before they are promoted to a Vulnerability. We are in the process of removing this model via [Deprecate and remove Vulnerabilities::Feedback epic](https://gitlab.com/groups/gitlab-org/-/epics/5629).
- **Issue Link** an instance of `Vulnerabilities::IssueLink` class. They are used to link `Vulnerability` objects to `Issue` objects.
## Commonly used terms
## Vulnerability creation from security reports
### Feedback
An instance of `Vulnerabilities::Feedback` class. They are created to keep track of users' interactions with Vulnerability Findings before they are promoted to a Vulnerability. This model is deprecated and due to be removed by GitLab 16.0 as part of the [Deprecate and remove Vulnerabilities::Feedback epic](https://gitlab.com/groups/gitlab-org/-/epics/5629).
### Issue Link
An instance of `Vulnerabilities::IssueLink` class. They are used to link `Vulnerability` records to `Issue` records.
### Merge Request Link
An instance of `Vulnerabilities::MergeRequestLink` class. They are used to link `Vulnerability` records to `MergeRequest` records.
### Security Finding
An instance of `Security::Finding` class. These serve as a metadata store for a specific vulnerability detected in a specific `Security::Scan`. They currently store **partial** finding data to improve performance of the pipeline security report. This class has been extended to store almost all required scan information so we can stop relying on job artifacts, and is [due to be used in favor of `Vulnerability::Findings` soon](https://gitlab.com/gitlab-org/gitlab/-/issues/393394).
### Security Scan
An instance of the `Security::Scan` class. A security scan represents a `Ci::Build` that produced a `Job Artifact` containing security scan results, which GitLab acknowledges and ingests as `Security::Finding` records.
### State Transition
An instance of the `Vulnerabilities::StateTransition` class. This model represents a state change of a respective Vulnerability record, for example the dismissal of a vulnerability which has been determined to be safe.
### Vulnerability
An instance of `Vulnerability` class. A `Vulnerability` is representative of a `Vulnerability::Finding` which has been detected in the default branch of the project, or if the `present_on_default_branch` flag is false, is representative of a finding which has been interacted with in some way outside of the default branch, such as if it is dismissed (`State Transition`), or linked to an `Issue` or `Merge Request`. They are created based on information available in `Vulnerabilities::Finding` class. Every `Vulnerability` **must have** a corresponding `Vulnerabilities::Finding` object to be valid, however this is not enforced at the database level.
### Finding
An instance of `Vulnerabilities::Finding` class. A `Vulnerability::Finding` is a database-only representation of a security finding which has been merged into the default branch of a project, as the same `Vulnerability` may be present in multiple places within a project. This class was previously called `Vulnerabilities::Occurrence`; after renaming the class, we kept the associated table name `vulnerability_occurrences` due to the effort involved in renaming large tables.
### Identifier
An instance of the `Vulnerabilities::Identifier` class. Each vulnerability is given a unique identifier that can be derived from its finding, enabling multiple Findings of the same `Vulnerability` to be correlated accordingly.
### Vulnerability Read
An instance of the `Vulnerabilities::Read` class. This is a denormalized record of `Vulnerability` and `Vulnerability::Finding` data, used to improve the performance of filtered vulnerability queries for the front end.
### Remediation
An instance of the `Vulnerabilities::Remediation` class. A remediation is representative of a known solution to a detected `Vulnerability`. These enable GitLab to recommend a change to resolve a specific `Vulnerability`.
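The sketch below condenses how these classes relate to one another. It is illustrative only: the association names and options are assumptions, not the actual GitLab model definitions.

```ruby
# Illustrative sketch -- association names and options are assumptions,
# not the actual GitLab model code.
class Vulnerability < ApplicationRecord
  has_many :findings, class_name: 'Vulnerabilities::Finding'   # every Vulnerability must have a Finding
  has_many :state_transitions, class_name: 'Vulnerabilities::StateTransition'
  has_many :issue_links, class_name: 'Vulnerabilities::IssueLink'
  has_many :merge_request_links, class_name: 'Vulnerabilities::MergeRequestLink'
end

module Vulnerabilities
  class Finding < ApplicationRecord
    self.table_name = 'vulnerability_occurrences' # legacy table name kept after the class rename
    belongs_to :vulnerability, optional: true     # the pairing is not enforced at the database level
    has_many :identifiers, class_name: 'Vulnerabilities::Identifier'
    has_many :remediations, class_name: 'Vulnerabilities::Remediation'
  end
end
```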
## Vulnerability creation from Security Reports
Assumptions:
@ -24,23 +65,23 @@ Assumptions:
- No Vulnerabilities are present in the database
- All pipelines perform security scans
1. Code is pushed to a branch that's **not** the default branch.
### Scan runs in a pipeline for a non-default branch
1. Code is pushed to the branch.
1. GitLab CI runs a new pipeline for that branch.
1. Pipeline status transitions to any of [`::Ci::Pipeline.completed_statuses`](https://gitlab.com/gitlab-org/gitlab/-/blob/354261b2fe4fc5b86d1408467beadd90e466ce0a/app/models/concerns/ci/has_status.rb#L12).
1. `Security::StoreScansWorker` is called and it schedules `Security::StoreScansService`.
1. `Security::StoreScansService` calls `Security::StoreGroupedScansService` and schedules `ScanSecurityReportSecretsWorker`.
1. `Security::StoreGroupedScansService` calls `Security::StoreScanService`.
1. `Security::StoreScanService` calls `Security::StoreFindingsService`.
1. `ScanSecurityReportSecretsWorker` calls `Security::TokenRevocationService` to revoke any leaked keys.
1. At this point we have `Security::Finding` objects **only**.
1. `ScanSecurityReportSecretsWorker` calls `Security::TokenRevocationService` to automatically revoke any leaked keys that were detected.
1. At this point we **only** have `Security::Finding` records as these findings are not present in the default branch of the project.
At this point, the following things can happen to the `Security::Finding`:
At this point, the following things can happen to the `Security::Finding` which would result in its promotion to a `Vulnerability::Finding` with a respective `Vulnerability` record:
- Dismissal
- Issue creation
- Promotion to a Vulnerability
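For orientation, a minimal Ruby sketch of the worker-to-service hand-off described in the steps above follows; method signatures and arguments are assumptions, and the real classes contain considerably more logic.

```ruby
# Illustrative only: condenses the hand-off chain above; real signatures differ.
module Security
  class StoreScansWorker
    include ApplicationWorker

    def perform(pipeline_id)
      # Runs once the pipeline reaches a completed status.
      StoreScansService.execute(pipeline_id)
    end
  end

  class StoreScansService
    def self.execute(pipeline_id)
      StoreGroupedScansService.execute(pipeline_id)                # -> StoreScanService -> StoreFindingsService
      ::ScanSecurityReportSecretsWorker.perform_async(pipeline_id) # revokes leaked keys asynchronously
    end
  end
end
```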
### Scan runs in a pipeline for the default branch
If the pipeline ran on the default branch then the following, additional steps are done:
If the pipeline ran on the default branch, then in addition to the steps in [Scan runs in a pipeline for a non-default branch](#scan-runs-in-a-pipeline-for-a-non-default-branch), the following steps are executed:
1. `Security::StoreScansService` gets called and schedules `StoreSecurityReportsWorker`.
1. `StoreSecurityReportsWorker` executes `Security::Ingestion::IngestReportsService`.
@ -49,27 +90,28 @@ If the pipeline ran on the default branch then the following, additional steps a
### Dismissal
If you select `Dismiss vulnerability`, a Feedback is created. You can also dismiss it with a comment.
If you change the state of a vulnerability, such as by selecting `Dismiss vulnerability`, the following things currently happen:
#### After Feedback removal
- A `Feedback` record of `dismissal` type is created to record the current state.
- If they do not already exist, a `Vulnerability Finding` and a `Vulnerability` with the `present_on_default_branch: false` attribute are created, to which a `State Transition` reflecting the state change is related.
If there is only a Security Finding, a Vulnerability Finding and a Vulnerability get created. At the same time we create a `Vulnerabilities::StateTransition` record to indicate the Vulnerability was dismissed.
You can optionally add a comment to the state change which is recorded on both the `Feedback` and the `State Transition`.
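Reduced to a hypothetical console session, the dismissal flow looks roughly like the following; attribute names and values are assumptions, and the real code paths go through dedicated services.

```ruby
# Hypothetical sketch -- attribute names are assumptions, not the real API.
feedback = Vulnerabilities::Feedback.create!(feedback_type: 'dismissal', comment: comment)

# Created only if the Security Finding was never promoted before:
vulnerability = Vulnerability.create!(present_on_default_branch: false)

Vulnerabilities::StateTransition.create!(
  vulnerability: vulnerability,
  to_state: 'dismissed',
  comment: comment # the optional comment is recorded on both records
)
```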
### Issue creation
### Issue or Merge Request creation
If you select `Create issue`, a Vulnerabilities::Feedback record is created as well. The Feedback has a different `feedback_type` and an `issue_id` that's not `NULL`.
If you select `Create issue` or `Create merge request` the following things currently happen:
NOTE:
Vulnerabilities::Feedback are in the process of being [deprecated](https://gitlab.com/groups/gitlab-org/-/epics/5629). This will later create a `Vulnerabilities::IssueLink` record.
- A `Vulnerabilities::Feedback` record is created. The Feedback has a `feedback_type` of `issue` or `merge request` and an `issue_id` or `merge_request_id` that's not `NULL`, respective to the attachment.
- If they do not already exist, a `Vulnerability Finding` and a `Vulnerability` with the `present_on_default_branch: false` attribute are created, to which an `Issue Link` or `Merge Request Link` is related, respective to the action taken.
#### After Feedback removal
## Vulnerabilities in the Default Branch
If there's only a Security Finding, a Vulnerability Finding and a Vulnerability gets created. At the same time, we create an Issue and a Issue Link.
Security Findings detected in scans run on the default branch are saved as `Vulnerabilities` with the `present_on_default_branch: true` attribute and respective `Vulnerability Finding` records. `Vulnerability` records that already exist from interactions outside of the default branch are updated to `present_on_default_branch: true`.
## Promotion to a Vulnerability
`Vulnerabilities` which have already been interacted with will retain all existing `State Transitions`, `Merge Request Links` and `Issue Links`, as well as a corresponding `Vulnerability Feedback`.
If the branch with a Security Finding gets merged into the default branch, all Security Findings get promoted into Vulnerabilities. Promotion is the process of creating Vulnerability Findings and Vulnerability records from those Security Findings.
## Vulnerability Read Creation
If there's a dismissal Feedback present for that Security Finding, the created Vulnerability is marked as dismissed.
`Vulnerability::Read` records are created via a PostgreSQL database trigger upon the creation of a `Vulnerability::Finding` record and as such are part of our ingestion process, though they have no impact on it apart from the denormalization performance benefits on the report pages.
If there's an issue Feedback present for that Security Finding, we also create an Issue Link for that Vulnerability.
This style of creation was intended to be fast and seamless, but has proven difficult to debug and maintain and may be [migrated to the application layer later](https://gitlab.com/gitlab-org/gitlab/-/issues/393912).
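For illustration, a trigger-based creation of this kind can be sketched as a migration like the one below; the function, trigger, and column names are assumptions and do not match the real GitLab trigger.

```ruby
# Hypothetical sketch of a trigger-backed denormalization -- names and columns are assumptions.
class AddVulnerabilityReadsTriggerSketch < Gitlab::Database::Migration[2.1]
  def up
    execute <<~SQL
      CREATE OR REPLACE FUNCTION insert_vulnerability_read() RETURNS trigger AS $$
      BEGIN
        INSERT INTO vulnerability_reads (vulnerability_id, project_id, severity)
        VALUES (NEW.vulnerability_id, NEW.project_id, NEW.severity);
        RETURN NULL;
      END
      $$ LANGUAGE plpgsql;

      CREATE TRIGGER trigger_insert_vulnerability_read
      AFTER INSERT ON vulnerability_occurrences
      FOR EACH ROW EXECUTE FUNCTION insert_vulnerability_read();
    SQL
  end

  def down
    execute 'DROP TRIGGER IF EXISTS trigger_insert_vulnerability_read ON vulnerability_occurrences'
    execute 'DROP FUNCTION IF EXISTS insert_vulnerability_read()'
  end
end
```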

View File

@ -4,14 +4,14 @@ group: Anti-Abuse
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Email verification **(FREE)**
# Account email verification **(FREE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/86352) in GitLab 15.2 [with a flag](../administration/feature_flags.md) named `require_email_verification`. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, ask an administrator to [enable the feature flag](../administration/feature_flags.md) named `require_email_verification`. On GitLab.com, this feature is not available.
Email verification provides an additional layer of GitLab account security.
Account email verification provides an additional layer of GitLab account security.
When certain conditions are met, an account is locked. If your account is locked,
you must verify your identity or reset your password to sign in to GitLab.

View File

@ -5,7 +5,7 @@ group: Authentication and Authorization
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# User email confirmation at sign-up **(FREE SELF)**
# Make new users confirm email **(FREE SELF)**
GitLab can be configured to require confirmation of a user's email address when
the user signs up. When this setting is enabled, the user is unable to sign in until

View File

@ -154,6 +154,11 @@ These deployment records are not created for pull-based deployments, for example
To track DORA metrics in these cases, you can [create a deployment record](../../api/deployments.md#create-a-deployment) using the Deployments API. See also the documentation page for [Track deployments of an external deployment tool](../../ci/environments/external_deployment_tools.md).
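For example, a deployment record for a pull-based deployment can be created from Ruby along these lines; the host, token, project ID, and field values are placeholders, and the Deployments API documentation remains the authoritative reference for the parameters.

```ruby
# Sketch: record an external deployment via the REST API so DORA metrics can pick it up.
require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/deployments')
request = Net::HTTP::Post.new(uri)
request['Content-Type'] = 'application/json'
request['PRIVATE-TOKEN'] = ENV['GITLAB_TOKEN'] # placeholder token
request.body = {
  environment: 'production',
  sha: 'a91957a858320c0e17f3a0eca7cfacbff50ea29a',
  ref: 'main',
  tag: false,
  status: 'success'
}.to_json

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts "#{response.code}: #{response.body}"
```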
### Measure DORA metrics with Jira
- Deployment frequency and Lead time for changes are calculated based on GitLab CI/CD and Merge Requests (MRs), and do not require Jira data.
- Time to restore service and Change failure rate require GitLab incidents for the calculation. For more information, see [Measure DORA Time to restore service and Change failure rate with external incidents](#measure-dora-time-to-restore-service-and-change-failure-rate-with-external-incidents).
### Measure DORA Time to restore service and Change failure rate with external incidents
[Time to restore service](#time-to-restore-service) and [Change failure rate](#change-failure-rate)

View File

@ -4,7 +4,7 @@ group: Anti-Abuse
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Moderate users **(FREE SELF)**
# Moderate users **(FREE)**
> [Introduced](https://gitlab.com/gitlab-org/modelops/anti-abuse/team-tasks/-/issues/155) in GitLab 15.8.

View File

@ -44,14 +44,14 @@ For example:
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
## View a file's Code Owner
## View Code Owners of a file or directory
To view the Code Owners for a file:
To view the Code Owners of a file or directory:
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Repository > Files**.
1. Go to the file or directory you want to see the Code Owners for.
1. Optional. Select your desired branch.
1. Optional. Select a branch or tag.
GitLab shows the Code Owners at the top of the page.

View File

@ -11,24 +11,25 @@ namespace :gettext do
end
desc 'Regenerate gitlab.pot file'
task regenerate: ['gettext:setup'] do
task :regenerate do
require_relative "../../tooling/lib/tooling/gettext_extractor"
ensure_locale_folder_presence!
# remove the `pot` file to ensure it's completely regenerated
FileUtils.rm_f(pot_file_path)
Rake::Task['gettext:pot:create'].invoke
extractor = Tooling::GettextExtractor.new(
glob_base: Rails.root
)
File.write(pot_file_path, extractor.generate_pot)
raise 'gitlab.pot file not generated' unless File.exist?(pot_file_path)
# Remove timestamps from the pot file
pot_content = File.read pot_file_path
pot_content.gsub!(/^"POT?-(?:Creation|Revision)-Date:.*\n/, '')
File.write pot_file_path, pot_content
puts <<~MSG
All done. Please commit the changes to `locale/gitlab.pot`.
Tip: For even faster regeneration, directly run the following command:
tooling/bin/gettext_extractor locale/gitlab.pot
MSG
end
@ -74,7 +75,7 @@ namespace :gettext do
raise <<~MSG
Changes in translated strings found, please update file `#{pot_file_path}` by running:
bin/rake gettext:regenerate
tooling/bin/gettext_extractor locale/gitlab.pot
Then commit and push the resulting changes to `#{pot_file_path}`.
@ -87,17 +88,6 @@ namespace :gettext do
private
# Customize list of translatable files
# See: https://github.com/grosser/gettext_i18n_rails#customizing-list-of-translatable-files
def files_to_translate
folders = %W(ee app lib config #{locale_path}).join(',')
exts = %w(rb erb haml slim rhtml js jsx vue handlebars hbs mustache).join(',')
Dir.glob(
"{#{folders}}/**/*.{#{exts}}"
)
end
def report_errors_for_file(file, errors_for_file)
puts "Errors in `#{file}`:"

View File

@ -10835,6 +10835,9 @@ msgstr ""
msgid "ComplianceFramework|No compliance frameworks are set up yet"
msgstr ""
msgid "ComplianceFramework|No pipeline configuration found"
msgstr ""
msgid "ComplianceReport|Apply framework to selected projects"
msgstr ""
@ -13652,9 +13655,6 @@ msgstr ""
msgid "Dear Administrator,"
msgstr ""
msgid "Debian package already exists in Distribution"
msgstr ""
msgid "Debug"
msgstr ""
@ -22127,6 +22127,9 @@ msgstr ""
msgid "Importing..."
msgstr ""
msgid "Import|GitHub import details"
msgstr ""
msgid "Import|Partially completed"
msgstr ""
@ -27289,6 +27292,9 @@ msgstr ""
msgid "MergeRequest|Approved by @%{username}"
msgstr ""
msgid "MergeRequest|Can't fetch the diff needed to update this view. Please reload this page."
msgstr ""
msgid "MergeRequest|Can't fetch the single file diff for the discussion. Please reload this page."
msgstr ""
@ -27307,9 +27313,6 @@ msgstr ""
msgid "MergeRequest|Compare %{target} and %{source}"
msgstr ""
msgid "MergeRequest|Encountered an issue while trying to fetch the single file diff."
msgstr ""
msgid "MergeRequest|Error dismissing suggestion popover. Please try again."
msgstr ""
@ -50431,6 +50434,9 @@ msgstr ""
msgid "You see projects here when you're added to a group or project."
msgstr ""
msgid "You should add a %{linkStart}.gitlab-ci.yml%{linkEnd} file to this project to avoid pipeline failures. %{compliancePipelineLinkStart}Why?%{compliancePipelineLinkEnd}"
msgstr ""
msgid "You successfully declined the invitation"
msgstr ""

View File

@ -78,13 +78,17 @@ FactoryBot.define do
after :build do |package, evaluator|
if evaluator.published_in == :create
create(:debian_publication, package: package)
build(:debian_publication, package: package)
elsif !evaluator.published_in.nil?
create(:debian_publication, package: package, distribution: evaluator.published_in)
end
end
after :create do |package, evaluator|
if evaluator.published_in == :create
package.debian_publication.save!
end
unless evaluator.without_package_files
create :debian_package_file, :source, evaluator.file_metadatum_trait, package: package
create :debian_package_file, :dsc, evaluator.file_metadatum_trait, package: package

View File

@ -0,0 +1,23 @@
import { shallowMount } from '@vue/test-utils';
import ImportDetailsApp from '~/import/details/components/import_details_app.vue';
import { mockProject } from '../mock_data';
describe('Import details app', () => {
let wrapper;
const createComponent = () => {
wrapper = shallowMount(ImportDetailsApp, {
propsData: {
project: mockProject,
},
});
};
describe('template', () => {
it('renders heading', () => {
createComponent();
expect(wrapper.find('h1').text()).toBe(ImportDetailsApp.i18n.pageTitle);
});
});
});

View File

@ -0,0 +1,31 @@
export const mockProject = {
id: 26,
name: 'acl',
fullPath: '/root/acl',
fullName: 'Administrator / acl',
refsUrl: '/root/acl/refs',
importSource: 'namespace/acl',
importStatus: 'finished',
humanImportStatusName: 'finished',
providerLink: 'https://github.com/namespace/acl',
relationType: null,
stats: {
fetched: {
note: 1,
issue: 2,
label: 5,
collaborator: 2,
pullRequest: 1,
pullRequestMergedBy: 1,
},
imported: {
note: 1,
issue: 2,
label: 6,
collaborator: 3,
pullRequest: 1,
pullRequestMergedBy: 1,
pullRequestReviewRequest: 1,
},
},
};

View File

@ -1,20 +1,45 @@
import { shallowMount } from '@vue/test-utils';
import { mount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import PerformanceBarApp from '~/performance_bar/components/performance_bar_app.vue';
import PerformanceBarStore from '~/performance_bar/stores/performance_bar_store';
describe('performance bar app', () => {
const store = new PerformanceBarStore();
const wrapper = shallowMount(PerformanceBarApp, {
store.addRequest('123', 'https://gitlab.com', '', {}, 'GET');
const wrapper = mount(PerformanceBarApp, {
propsData: {
store,
env: 'development',
requestId: '123',
requestMethod: 'GET',
statsUrl: 'https://log.gprd.gitlab.net/app/dashboards#/view/',
peekUrl: '/-/peek/results',
profileUrl: '?lineprofiler=true',
},
});
const flamegraphDiv = () => wrapper.find('#peek-flamegraph');
const flamegrapLinks = () => flamegraphDiv().findAllComponents(GlLink);
it('creates three flamegraph buttons based on the path', () => {
expect(flamegrapLinks()).toHaveLength(3);
['wall', 'cpu', 'object'].forEach((path, index) => {
expect(flamegrapLinks().at(index).attributes('href')).toBe(
`https://gitlab.com?performance_bar=flamegraph&stackprof_mode=${path}`,
);
});
expect(flamegrapLinks().at(0).attributes('href')).toEqual(
'https://gitlab.com?performance_bar=flamegraph&stackprof_mode=wall',
);
expect(flamegrapLinks().at(1).attributes('href')).toEqual(
'https://gitlab.com?performance_bar=flamegraph&stackprof_mode=cpu',
);
expect(flamegrapLinks().at(2).attributes('href')).toEqual(
'https://gitlab.com?performance_bar=flamegraph&stackprof_mode=object',
);
});
it('sets the class to match the environment', () => {
expect(wrapper.element.getAttribute('class')).toContain('development');
});

View File

@ -20,6 +20,7 @@ describe('performance bar wrapper', () => {
peekWrapper.setAttribute('id', 'js-peek');
peekWrapper.dataset.env = 'development';
peekWrapper.dataset.requestId = '123';
peekWrapper.dataset.requestMethod = 'GET';
peekWrapper.dataset.peekUrl = '/-/peek/results';
peekWrapper.dataset.statsUrl = 'https://log.gprd.gitlab.net/app/dashboards#/view/';
peekWrapper.dataset.profileUrl = '?lineprofiler=true';
@ -70,7 +71,13 @@ describe('performance bar wrapper', () => {
it('adds the request immediately', () => {
vm.addRequest('123', 'https://gitlab.com/');
expect(vm.store.addRequest).toHaveBeenCalledWith('123', 'https://gitlab.com/', undefined);
expect(vm.store.addRequest).toHaveBeenCalledWith(
'123',
'https://gitlab.com/',
undefined,
undefined,
undefined,
);
});
});

View File

@ -66,7 +66,7 @@ describe('PerformanceBarService', () => {
describe('operationName', () => {
function requestUrl(response, peekUrl) {
return PerformanceBarService.callbackParams(response, peekUrl)[3];
return PerformanceBarService.callbackParams(response, peekUrl)[4];
}
it('gets the operation name from response.config', () => {

View File

@ -0,0 +1,56 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
require_migration! 'add_unique_packages_index_when_debian'
require_migration! 'add_tmp_unique_packages_index_when_debian'
RSpec.describe EnsureUniqueDebianPackages, feature_category: :package_registry do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:packages) { table(:packages_packages) }
let!(:group) { namespaces.create!(name: 'group', path: 'group_path') }
let!(:project_namespace1) { namespaces.create!(name: 'name1', path: 'path1') }
let!(:project_namespace2) { namespaces.create!(name: 'name2', path: 'path2') }
let!(:project1) { projects.create!(namespace_id: group.id, project_namespace_id: project_namespace1.id) }
let!(:project2) { projects.create!(namespace_id: group.id, project_namespace_id: project_namespace2.id) }
let!(:debian_package1_1) do
packages.create!(project_id: project1.id, package_type: 9, name: FFaker::Lorem.word, version: 'v1.0')
end
let(:debian_package1_2) do
packages.create!(project_id: project1.id, package_type: 9, name: debian_package1_1.name,
version: debian_package1_1.version)
end
let!(:pypi_package1_3) do
packages.create!(project_id: project1.id, package_type: 5, name: debian_package1_1.name,
version: debian_package1_1.version)
end
let!(:debian_package2_1) do
packages.create!(project_id: project2.id, package_type: 9, name: debian_package1_1.name,
version: debian_package1_1.version)
end
before do
# Remove unique indices
AddUniquePackagesIndexWhenDebian.new.down
AddTmpUniquePackagesIndexWhenDebian.new.down
# Then create the duplicate packages
debian_package1_2
end
it 'marks the duplicated packages as pending destruction', :aggregate_failures do
expect { migrate! }
.to change { packages.where(status: 0).count }.from(4).to(3)
.and not_change { packages.where(status: 1).count }
.and not_change { packages.where(status: 2).count }
.and not_change { packages.where(status: 3).count }
.and change { packages.where(status: 4).count }.from(0).to(1)
end
end

View File

@ -682,24 +682,20 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
describe "#unique_debian_package_name" do
describe "uniqueness for package type debian" do
let!(:package) { create(:debian_package) }
it "will allow a Debian package with same project, name and version, but different distribution" do
new_package = build(:debian_package, project: package.project, name: package.name, version: package.version)
expect(new_package).to be_valid
end
it "will not allow a Debian package with same project, name, version and distribution" do
new_package = build(:debian_package, project: package.project, name: package.name, version: package.version)
new_package.debian_publication.distribution = package.debian_publication.distribution
expect(new_package).not_to be_valid
expect(new_package.errors.to_a).to include('Debian package already exists in Distribution')
expect(new_package.errors.to_a).to include('Name has already been taken')
end
it "will allow a Debian package with same project, name, version, but no distribution" do
it "will not allow a Debian package with same project, name, version, but no distribution" do
new_package = build(:debian_package, project: package.project, name: package.name, version: package.version, published_in: nil)
expect(new_package).to be_valid
expect(new_package).not_to be_valid
expect(new_package.errors.to_a).to include('Name has already been taken')
end
context 'with pending_destruction package' do
@ -713,7 +709,7 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
Packages::Package.package_types.keys.without('conan', 'debian').each do |pt|
Packages::Package.package_types.keys.without('conan').each do |pt|
context "project id, name, version and package type uniqueness for package type #{pt}" do
let(:package) { create("#{pt}_package") }
@ -722,6 +718,15 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
expect(new_package).not_to be_valid
expect(new_package.errors.to_a).to include("Name has already been taken")
end
context 'with pending_destruction package' do
let!(:package) { create("#{pt}_package", :pending_destruction) }
it "will allow a #{pt} package with same project, name, version and package_type" do
new_package = build("#{pt}_package", project: package.project, name: package.name, version: package.version)
expect(new_package).to be_valid
end
end
end
end
end

View File

@ -62,18 +62,6 @@ RSpec.describe 'getting project fork details', feature_category: :source_code_ma
end
end
context 'when fork_divergence_counts feature flag is disabled' do
before do
stub_feature_flags(fork_divergence_counts: false)
end
it 'does not return fork details' do
post_graphql(query, current_user: current_user)
expect(graphql_data['project']['forkDetails']).to be_nil
end
end
context 'when a user cannot read the code' do
let_it_be(:current_user) { create(:user) }

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Import::GithubController, feature_category: :importers do
describe 'GET details' do
subject { get details_import_github_path }
let_it_be(:user) { create(:user) }
before do
login_as(user)
end
context 'with feature enabled' do
before do
stub_feature_flags(import_details_page: true)
subject
end
it 'responds with a 200 and shows the template' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:details)
end
end
context 'with feature disabled' do
before do
stub_feature_flags(import_details_page: false)
subject
end
it 'responds with a 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end

View File

@ -64,7 +64,6 @@ RSpec.describe Projects::WikisController, feature_category: :wiki do
before do
# Setting an invalid page title to render edit page
put wiki_page_path(project_wiki, wiki_page), params: { wiki: { title: '' } }
print(response.body)
end
it_behaves_like 'embed.diagrams.net frame-src directive'

View File

@ -4,13 +4,17 @@ require 'spec_helper'
RSpec.describe Packages::Debian::FindOrCreatePackageService, feature_category: :package_registry do
let_it_be(:distribution) { create(:debian_project_distribution, :with_suite) }
let_it_be(:distribution2) { create(:debian_project_distribution, :with_suite) }
let_it_be(:project) { distribution.project }
let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user, params) }
let(:params2) { params }
let(:service2) { described_class.new(project, user, params2) }
let(:package) { subject.payload[:package] }
let(:package2) { service.execute.payload[:package] }
let(:package2) { service2.execute.payload[:package] }
shared_examples 'find or create Debian package' do
it 'returns the same object' do
@ -55,11 +59,24 @@ RSpec.describe Packages::Debian::FindOrCreatePackageService, feature_category: :
it_behaves_like 'find or create Debian package'
end
context 'with existing package in another distribution' do
let(:params) { { name: 'foo', version: '1.0+debian', distribution_name: distribution.codename } }
let(:params2) { { name: 'foo', version: '1.0+debian', distribution_name: distribution2.codename } }
it 'raises ArgumentError' do
expect { subject }.to change { ::Packages::Package.count }.by(1)
expect { package2 }.to raise_error(ArgumentError, "Debian package #{package.name} #{package.version} exists " \
"in distribution #{distribution.codename}")
end
end
context 'with non-existing distribution' do
let(:params) { { name: 'foo', version: '1.0+debian', distribution_name: 'not-existing' } }
it 'raises ActiveRecord::RecordNotFound' do
expect { package }.to raise_error(ActiveRecord::RecordNotFound)
expect { package }.to raise_error(ActiveRecord::RecordNotFound,
/^Couldn't find Packages::Debian::ProjectDistribution/)
end
end
end

View File

@ -1,4 +1,5 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Packages::Debian::ProcessChangesService, feature_category: :package_registry do
@ -55,7 +56,7 @@ RSpec.describe Packages::Debian::ProcessChangesService, feature_category: :packa
it_behaves_like 'raises error with missing field', 'Distribution'
end
context 'with existing package' do
context 'with existing package in the same distribution' do
let_it_be_with_reload(:existing_package) do
create(:debian_package, name: 'sample', version: '1.2.3~alpha2', project: distribution.project, published_in: distribution)
end
@ -64,10 +65,37 @@ RSpec.describe Packages::Debian::ProcessChangesService, feature_category: :packa
expect { subject.execute }
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and change(package_file, :package).to(existing_package)
.and change { package_file.package }.to(existing_package)
end
context 'marked as pending_destruction' do
context 'and marked as pending_destruction' do
it 'does not re-use the existing package' do
existing_package.pending_destruction!
expect { subject.execute }
.to change { Packages::Package.count }.by(1)
.and not_change { Packages::PackageFile.count }
end
end
end
context 'with existing package in another distribution' do
let_it_be_with_reload(:existing_package) do
create(:debian_package, name: 'sample', version: '1.2.3~alpha2', project: distribution.project)
end
it 'raises ArgumentError' do
expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
expect { subject.execute }
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and not_change { incoming.package_files.count }
.and raise_error(ArgumentError,
"Debian package #{existing_package.name} #{existing_package.version} exists " \
"in distribution #{existing_package.debian_distribution.codename}")
end
context 'and marked as pending_destruction' do
it 'does not re-use the existing package' do
existing_package.pending_destruction!

View File

@ -1,4 +1,5 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :package_registry do
@ -19,14 +20,14 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
expect { subject.execute }
.to not_change(Packages::Package, :count)
.and not_change(Packages::PackageFile, :count)
.and change(Packages::Debian::Publication, :count).by(1)
.and change { Packages::Debian::Publication.count }.by(1)
.and not_change(package.package_files, :count)
.and change { package.reload.name }.to('sample')
.and change { package.reload.version }.to('1.2.3~alpha2')
.and change { package.reload.status }.from('processing').to('default')
.and change { package.reload.debian_publication }.from(nil)
.and change(debian_file_metadatum, :file_type).from('unknown').to(expected_file_type)
.and change(debian_file_metadatum, :component).from(nil).to(component_name)
.and change { debian_file_metadatum.file_type }.from('unknown').to(expected_file_type)
.and change { debian_file_metadatum.component }.from(nil).to(component_name)
end
end
@ -67,21 +68,42 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
expect(::Packages::Debian::GenerateDistributionWorker)
.to receive(:perform_async).with(:project, distribution.id)
expect { subject.execute }
.to change(Packages::Package, :count).from(2).to(1)
.and change(Packages::PackageFile, :count).from(16).to(9)
.to change { Packages::Package.count }.from(2).to(1)
.and change { Packages::PackageFile.count }.from(16).to(9)
.and not_change(Packages::Debian::Publication, :count)
.and change(package.package_files, :count).from(8).to(0)
.and change(package_file, :package).from(package).to(matching_package)
.and change { package.package_files.count }.from(8).to(0)
.and change { package_file.package }.from(package).to(matching_package)
.and not_change(matching_package, :name)
.and not_change(matching_package, :version)
.and change(debian_file_metadatum, :file_type).from('unknown').to(expected_file_type)
.and change(debian_file_metadatum, :component).from(nil).to(component_name)
.and change { debian_file_metadatum.file_type }.from('unknown').to(expected_file_type)
.and change { debian_file_metadatum.component }.from(nil).to(component_name)
expect { package.reload }
.to raise_error(ActiveRecord::RecordNotFound)
end
end
context 'when there is a matching published package in another distribution' do
let!(:matching_package) do
create(
:debian_package,
project: distribution.project,
name: 'sample',
version: '1.2.3~alpha2'
)
end
it 'raises ArgumentError', :aggregate_failures do
expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
expect { subject.execute }
.to not_change(Packages::Package, :count)
.and not_change(Packages::PackageFile, :count)
.and not_change(package.package_files, :count)
.and raise_error(ArgumentError, "Debian package sample 1.2.3~alpha2 exists " \
"in distribution #{matching_package.debian_distribution.codename}")
end
end
context 'when there is a matching published package pending destruction' do
let!(:matching_package) do
create(

View File

@ -179,6 +179,7 @@ RSpec.configure do |config|
config.include DetailedErrorHelpers
config.include RequestUrgencyMatcher, type: :controller
config.include RequestUrgencyMatcher, type: :request
config.include Capybara::RSpecMatchers, type: :request
config.include_context 'when rendered has no HTML escapes', type: :view

View File

@ -123,7 +123,6 @@
- './ee/spec/controllers/projects/branches_controller_spec.rb'
- './ee/spec/controllers/projects/clusters_controller_spec.rb'
- './ee/spec/controllers/projects_controller_spec.rb'
- './ee/spec/controllers/projects/dependencies_controller_spec.rb'
- './ee/spec/controllers/projects/deploy_keys_controller_spec.rb'
- './ee/spec/controllers/projects/environments_controller_spec.rb'
- './ee/spec/controllers/projects/feature_flag_issues_controller_spec.rb'

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
require 'tmpdir'
module TmpdirHelper
def mktmpdir
@tmpdir_helper_dirs ||= []

View File

@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'rake_helper'
require_relative "../../tooling/lib/tooling/gettext_extractor"
RSpec.describe 'gettext', :silence_stdout, feature_category: :internationalization do
let(:locale_path) { Rails.root.join('tmp/gettext_spec') }
@ -37,19 +38,17 @@ RSpec.describe 'gettext', :silence_stdout, feature_category: :internationalizati
describe ':regenerate' do
let(:locale_nz_path) { File.join(locale_path, 'en_NZ') }
let(:po_file_path) { File.join(locale_nz_path, 'gitlab.po') }
let(:extractor) { instance_double(Tooling::GettextExtractor, generate_pot: '') }
before do
FileUtils.mkdir(locale_nz_path)
File.write(po_file_path, fixture_file('valid.po'))
Rake::Task['gettext:setup'].invoke
# this task takes a *really* long time to complete, so stub it for the spec
allow(Rake::Task['gettext:pot:create']).to receive(:invoke) { invoke_find.call }
allow(Tooling::GettextExtractor).to receive(:new).and_return(extractor)
end
context 'when the locale folder is not found' do
let(:invoke_find) { -> { true } }
before do
FileUtils.rm_r(locale_path) if Dir.exist?(locale_path)
end
@ -61,33 +60,13 @@ RSpec.describe 'gettext', :silence_stdout, feature_category: :internationalizati
end
context 'when the gitlab.pot file cannot be generated' do
let(:invoke_find) { -> { true } }
it 'prints an error' do
allow(File).to receive(:exist?).and_return(false)
expect { run_rake_task('gettext:regenerate') }
.to raise_error(/gitlab.pot file not generated/)
end
end
context 'when gettext:pot:create changes the revision dates' do
let(:invoke_find) { -> { File.write pot_file_path, fixture_file('valid.po') } }
before do
File.write pot_file_path, fixture_file('valid.po')
end
it 'resets the changes' do
pot_file = File.read(pot_file_path)
expect(pot_file).to include('PO-Revision-Date: 2017-07-13 12:10-0500')
expect(pot_file).to include('PO-Creation-Date: 2016-07-13 12:11-0500')
run_rake_task('gettext:regenerate')
pot_file = File.read(pot_file_path)
expect(pot_file).not_to include('PO-Revision-Date: 2017-07-13 12:10-0500')
expect(pot_file).not_to include('PO-Creation-Date: 2016-07-13 12:11-0500')
end
end
end
describe ':lint' do

View File

@ -0,0 +1,254 @@
# frozen_string_literal: true
require 'rspec/parameterized'
require_relative '../../../../tooling/lib/tooling/gettext_extractor'
require_relative '../../../support/helpers/stub_env'
require_relative '../../../support/tmpdir'
RSpec.describe Tooling::GettextExtractor, feature_category: :tooling do
include StubENV
include TmpdirHelper
let(:base_dir) { mktmpdir }
let(:instance) { described_class.new(backend_glob: '*.{rb,haml,erb}', glob_base: base_dir) }
let(:frontend_status) { true }
let(:files) do
{
rb_file: File.join(base_dir, 'ruby.rb'),
haml_file: File.join(base_dir, 'template.haml'),
erb_file: File.join(base_dir, 'template.erb')
}
end
before do
# Disable parallelism in specs in order to suppress some confusing stack traces
stub_env(
'PARALLEL_PROCESSOR_COUNT' => 0
)
# Mock Backend files
File.write(files[:rb_file], '[_("RB"), _("All"), n_("Apple", "Apples", size), s_("Context|A"), N_("All2") ]')
File.write(
files[:erb_file],
'<h1><%= _("ERB") + _("All") + n_("Pear", "Pears", size) + s_("Context|B") + N_("All2") %></h1>'
)
File.write(
files[:haml_file],
'%h1= _("HAML") + _("All") + n_("Cabbage", "Cabbages", size) + s_("Context|C") + N_("All2")'
)
# Stub out Frontend file parsing
status = {}
allow(status).to receive(:success?).and_return(frontend_status)
allow(Open3).to receive(:capture2)
.with("node scripts/frontend/extract_gettext_all.js --all")
.and_return([
'{"example.js": [ ["JS"], ["All"], ["Mango\u0000Mangoes"], ["Context|D"], ["All2"] ] }',
status
])
end
describe '::HamlParser' do
it 'overwrites libraries in order to prefer hamlit' do
expect(described_class::HamlParser.libraries).to match_array(['hamlit'])
end
end
describe '#parse' do
it 'collects and merges translatable strings from frontend and backend' do
expect(instance.parse([]).to_h { |entry| [entry.msgid, entry.msgid_plural] }).to eq({
'All' => nil,
'All2' => nil,
'Context|A' => nil,
'Context|B' => nil,
'Context|C' => nil,
'Context|D' => nil,
'ERB' => nil,
'HAML' => nil,
'JS' => nil,
'RB' => nil,
'Apple' => 'Apples',
'Cabbage' => 'Cabbages',
'Mango' => 'Mangoes',
'Pear' => 'Pears'
})
end
it 're-raises error from backend extraction' do
allow(instance).to receive(:parse_backend_file).and_raise(StandardError)
expect { instance.parse([]) }.to raise_error(StandardError)
end
context 'when frontend extraction raises an error' do
let(:frontend_status) { false }
it 'is re-raised' do
expect { instance.parse([]) }.to raise_error(StandardError, 'Could not parse frontend files')
end
end
end
describe '#generate_pot' do
subject { instance.generate_pot }
it 'produces a pot file without date headers' do
expect(subject).not_to include('POT-Creation-Date:')
expect(subject).not_to include('PO-Revision-Date:')
end
it 'produces a pot file with all translated strings, sorted by msgid' do
expect(subject).to eql <<~POT_FILE
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the gitlab package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: gitlab 1.0.0\\n"
"Report-Msgid-Bugs-To: \\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\\n"
"Language-Team: LANGUAGE <LL@li.org>\\n"
"Language: \\n"
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset=UTF-8\\n"
"Content-Transfer-Encoding: 8bit\\n"
"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\\n"
msgid "All"
msgstr ""
msgid "All2"
msgstr ""
msgid "Apple"
msgid_plural "Apples"
msgstr[0] ""
msgstr[1] ""
msgid "Cabbage"
msgid_plural "Cabbages"
msgstr[0] ""
msgstr[1] ""
msgid "Context|A"
msgstr ""
msgid "Context|B"
msgstr ""
msgid "Context|C"
msgstr ""
msgid "Context|D"
msgstr ""
msgid "ERB"
msgstr ""
msgid "HAML"
msgstr ""
msgid "JS"
msgstr ""
msgid "Mango"
msgid_plural "Mangoes"
msgstr[0] ""
msgstr[1] ""
msgid "Pear"
msgid_plural "Pears"
msgstr[0] ""
msgstr[1] ""
msgid "RB"
msgstr ""
POT_FILE
end
end
# This private method is tested directly because, unfortunately, it is called
# with the "Parallel" gem. As the Parallel gem executes this function in a different
# thread, our coverage reporting is confused.
#
# On the other hand, the tests are also more readable, so maybe a win-win
describe '#parse_backend_file' do
subject { instance.send(:parse_backend_file, curr_file) }
where do
{
'with ruby file' => {
invalid_syntax: 'x = {id: _("RB")',
file: :rb_file,
result: {
'All' => nil,
'All2' => nil,
'Context|A' => nil,
'RB' => nil,
'Apple' => 'Apples'
}
},
'with haml file' => {
invalid_syntax: " %a\n- content = _('HAML')",
file: :haml_file,
result: {
'All' => nil,
'All2' => nil,
'Context|C' => nil,
'HAML' => nil,
'Cabbage' => 'Cabbages'
}
},
'with erb file' => {
invalid_syntax: "<% x = {id: _('ERB') %>",
file: :erb_file,
result: {
'All' => nil,
'All2' => nil,
'Context|B' => nil,
'ERB' => nil,
'Pear' => 'Pears'
}
}
}
end
with_them do
let(:curr_file) { files[file] }
context 'when file has valid syntax' do
it 'parses file and returns extracted strings as POEntries' do
expect(subject.map(&:class).uniq).to match_array([GetText::POEntry])
expect(subject.to_h { |entry| [entry.msgid, entry.msgid_plural] }).to eq(result)
end
end
# We do not worry about syntax errors in these file types, as it is _not_ the job of
# the gettext extractor to ensure the correctness of the files. These errors should
# be raised elsewhere.
context 'when file has invalid syntax' do
before do
File.write(curr_file, invalid_syntax)
end
it 'does not raise error' do
expect { subject }.not_to raise_error
end
end
end
context 'with unsupported file' do
let(:curr_file) { File.join(base_dir, 'foo.unsupported') }
before do
File.write(curr_file, '')
end
it 'raises error' do
expect { subject }.to raise_error(NotImplementedError)
end
end
end
end

View File

@ -199,18 +199,6 @@ RSpec.describe 'projects/_home_panel' do
expect(rendered).not_to have_content("Forked from #{source_project.full_name}")
end
context 'when fork_divergence_counts is disabled' do
before do
stub_feature_flags(fork_divergence_counts: false)
end
it 'shows the forked-from project' do
render
expect(rendered).to have_content("Forked from #{source_project.full_name}")
end
end
end
context 'user cannot read fork source' do
@ -223,18 +211,6 @@ RSpec.describe 'projects/_home_panel' do
expect(rendered).not_to have_content("Forked from an inaccessible project")
end
context 'when fork_divergence_counts is disabled' do
before do
stub_feature_flags(fork_divergence_counts: false)
end
it 'shows the message that forked project is inaccessible' do
render
expect(rendered).to have_content("Forked from an inaccessible project")
end
end
end
end
end

tooling/bin/gettext_extractor Executable file
View File

@ -0,0 +1,29 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require_relative '../lib/tooling/gettext_extractor'
pot_file = ARGV.shift
if !pot_file || !Dir.exist?(File.dirname(pot_file))
abort <<~MSG
Please provide a target file name as the first argument, e.g.
#{$PROGRAM_NAME} locale/gitlab.pot
MSG
end
puts <<~MSG
Extracting translatable strings from source files...
MSG
root_dir = File.expand_path('../../', __dir__)
extractor = Tooling::GettextExtractor.new(
glob_base: root_dir
)
File.write(pot_file, extractor.generate_pot)
puts <<~MSG
All done. Please commit the changes to `#{pot_file}`.
MSG
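For reference, the script above boils down to the extractor's public API; a minimal programmatic sketch of the same flow (the target path below is illustrative, not mandated by the script) would be:

require_relative '../lib/tooling/gettext_extractor'

# Hypothetical target path; glob_base defaults to the current working directory.
extractor = Tooling::GettextExtractor.new(glob_base: File.expand_path('../../', __dir__))
File.write('locale/gitlab.pot', extractor.generate_pot)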

View File

@ -0,0 +1,106 @@
# frozen_string_literal: true
require 'parallel'
require 'gettext/po'
require 'gettext/po_entry'
require 'gettext/tools/parser/erb'
require 'gettext/tools/parser/ruby'
require 'gettext_i18n_rails/haml_parser'
require 'json'
require 'open3'
module Tooling
class GettextExtractor < GetText::Tools::XGetText
class HamlParser < GettextI18nRails::HamlParser
# If both `haml` and `hamlit` are available, the parser prefers `haml` by default.
# `hamlit` should be faster, so we force it here.
def self.libraries
["hamlit"]
end
end
def initialize(
backend_glob: "{ee,app,lib,config,locale}/**/*.{rb,erb,haml}",
glob_base: nil,
package_name: 'gitlab',
package_version: '1.0.0'
)
super()
@backend_glob = backend_glob
@package_name = package_name
@glob_base = glob_base || Dir.pwd
@package_version = package_version
# Ensure that the messages are ordered by id
@po_order = :msgid
@po_format_options = {
# No line breaks within a message
max_line_width: -1,
# Do not print references to files
include_reference_comment: false
}
end
def parse(_paths)
po = GetText::PO.new
parse_backend_files.each do |po_entry|
merge_po_entries(po, po_entry)
end
parse_frontend_files.each do |po_entry|
merge_po_entries(po, po_entry)
end
po
end
# Overrides method from GetText::Tools::XGetText
# This makes the method public and passes in an empty array of paths,
# as our overridden "parse" method needs no paths.
def generate_pot
super([])
end
private
# Overrides method from GetText::Tools::XGetText
# in order to remove revision dates, as our locale/gitlab.pot is checked in to the repository.
def header_content
super.gsub(/^POT?-(?:Creation|Revision)-Date:.*\n/, '')
end
def merge_po_entries(po, po_entry)
existing_entry = po[po_entry.msgctxt, po_entry.msgid]
po_entry = existing_entry.merge(po_entry) if existing_entry
po[po_entry.msgctxt, po_entry.msgid] = po_entry
end
def parse_backend_file(path)
case ::File.extname(path)
when '.rb'
GetText::RubyParser.new(path).parse
when '.haml'
HamlParser.parse(path).collect { |item| create_po_entry(*item) }
when '.erb'
GetText::ErbParser.new(path).parse
else
raise NotImplementedError
end
end
def parse_backend_files
files = Dir.glob(File.join(@glob_base, @backend_glob))
Parallel.flat_map(files) { |item| parse_backend_file(item) }
end
def parse_frontend_files
results, status = Open3.capture2('node scripts/frontend/extract_gettext_all.js --all')
raise StandardError, "Could not parse frontend files" unless status.success?
# rubocop:disable Gitlab/Json
JSON.parse(results)
.values
.flatten(1)
.collect { |entry| create_po_entry(*entry) }
# rubocop:enable Gitlab/Json
end
end
end
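As an illustration of the header_content override above (a minimal sketch, not part of the change), the same regular expression drops both date headers from a generated POT header while leaving the remaining fields untouched:

header = "Project-Id-Version: gitlab 1.0.0\n" \
  "POT-Creation-Date: 2023-04-03 21:08+0000\n" \
  "PO-Revision-Date: 2023-04-03 21:08+0000\n" \
  "MIME-Version: 1.0\n"
# Both the POT-Creation-Date and PO-Revision-Date lines are removed.
header.gsub(/^POT?-(?:Creation|Revision)-Date:.*\n/, '')
# => "Project-Id-Version: gitlab 1.0.0\nMIME-Version: 1.0\n"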