Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-07-06 18:10:31 +00:00
parent 6d3676d610
commit eec8ec6e4e
187 changed files with 1000 additions and 642 deletions

View File

@ -14,3 +14,6 @@ include:
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
inputs:
gem_name: "gitlab-schema-validation"
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
inputs:
gem_name: "gitlab-ipynbdiff"

View File

@ -14,7 +14,7 @@ review-build-cng:
review-deploy-env:
allow_failure: true
stage: deploy
needs: ["release-environments-build-cng"]
needs: ["review-build-cng"]
variables:
DEPLOY_ENV: deploy.env
script:

View File

@ -44,6 +44,12 @@ review-build-cng:
variables:
HOST_SUFFIX: "${CI_ENVIRONMENT_SLUG}"
DOMAIN: "-${CI_ENVIRONMENT_SLUG}.${REVIEW_APPS_DOMAIN}"
GITLAB_HELM_CHART_PROJECT_URL: "https://gitlab.com/gitlab-org/charts/gitlab"
GITLAB_HELM_REPO_URL: "https://charts.gitlab.io"
GITLAB_REPO_URL: ${CI_PROJECT_URL}
GITLAB_IMAGE_REPOSITORY: "registry.gitlab.com/gitlab-org/build/cng-mirror"
GITLAB_IMAGE_SUFFIX: "ee"
GITLAB_VERIFY_DEPLOY_TIMEOUT_MINUTES: 5
GITLAB_HELM_CHART_REF: "75b1486a9aec212d0f49ef1251526d8e51004bbc" # 7.0.1: https://gitlab.com/gitlab-org/charts/gitlab/-/commit/75b1486a9aec212d0f49ef1251526d8e51004bbc
environment:
name: review/${CI_COMMIT_REF_SLUG}${SCHEDULE_TYPE} # No separator for SCHEDULE_TYPE so it's compatible as before and looks nice without it

View File

@ -11,10 +11,6 @@ include:
inputs:
gem_name: "microsoft_graph_mailer"
gem_path_prefix: "vendor/gems/"
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
inputs:
gem_name: "ipynbdiff"
gem_path_prefix: "vendor/gems/"
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
inputs:
gem_name: "omniauth-azure-oauth2"

View File

@ -146,11 +146,14 @@ The Geo primary site needs to checksum every replicable so secondaries can verif
enable_lock_retries!
def up
create_table :cool_widget_states, id: false do |t|
create_table :cool_widget_states do |t|
t.datetime_with_timezone :verification_started_at
t.datetime_with_timezone :verification_retry_at
t.datetime_with_timezone :verified_at
t.references :cool_widget, primary_key: true, default: nil, index: false, foreign_key: { on_delete: :cascade }
t.references :cool_widget,
null: false,
index: { unique: true },
foreign_key: { on_delete: :cascade }
t.integer :verification_state, default: 0, limit: 2, null: false
t.integer :verification_retry_count, default: 0, limit: 2, null: false
t.binary :verification_checksum, using: 'verification_checksum::bytea'
@ -292,6 +295,11 @@ That's all of the required database changes.
# Search the codebase for examples, and consult a Geo expert if needed.
end
override :verification_state_model_key
def verification_state_model_key
:cool_widget_id
end
override :verification_state_table_class
def verification_state_table_class
CoolWidgetState

View File

@ -148,14 +148,13 @@ The Geo primary site needs to checksum every replicable so secondaries can verif
enable_lock_retries!
def up
create_table :cool_widget_states, id: false do |t|
create_table :cool_widget_states do |t|
t.datetime_with_timezone :verification_started_at
t.datetime_with_timezone :verification_retry_at
t.datetime_with_timezone :verified_at
t.references :cool_widget,
primary_key: true,
default: nil,
index: false,
null: false,
index: { unique: true },
foreign_key: { on_delete: :cascade }
t.integer :verification_state, default: 0, limit: 2, null: false
t.integer :verification_retry_count, default: 0, limit: 2, null: false
@ -298,6 +297,11 @@ That's all of the required database changes.
# Search the codebase for examples, and consult a Geo expert if needed.
end
override :verification_state_model_key
def verification_state_model_key
:cool_widget_id
end
override :verification_state_table_class
def verification_state_table_class
CoolWidgetState

View File

@ -28,7 +28,6 @@ Naming/InclusiveLanguage:
- 'ee/app/controllers/projects/push_rules_controller.rb'
- 'ee/lib/arkose/verify_response.rb'
- 'ee/lib/system_check/geo/http_connection_check.rb'
- 'ee/spec/lib/gitlab/checks/diff_check_spec.rb'
- 'ee/spec/models/dora/lead_time_for_changes_metric_spec.rb'
- 'lib/api/entities/application_setting.rb'
- 'lib/api/settings.rb'
@ -45,31 +44,18 @@ Naming/InclusiveLanguage:
- 'lib/gitlab/sanitizers/svg.rb'
- 'lib/gitlab/sanitizers/svg/whitelist.rb'
- 'lib/system_check/app/git_user_default_ssh_config_check.rb'
- 'rubocop/cop/active_record_association_reload.rb'
- 'rubocop/cop/avoid_becomes.rb'
- 'rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers.rb'
- 'rubocop/cop/avoid_return_from_blocks.rb'
- 'rubocop/cop/default_scope.rb'
- 'rubocop/cop/destroy_all.rb'
- 'rubocop/cop/graphql/id_type.rb'
- 'rubocop/cop/group_public_or_visible_to_user.rb'
- 'rubocop/cop/inject_enterprise_edition_module.rb'
- 'rubocop/cop/migration/add_columns_to_wide_tables.rb'
- 'spec/controllers/concerns/issuable_collections_spec.rb'
- 'spec/controllers/health_check_controller_spec.rb'
- 'spec/controllers/metrics_controller_spec.rb'
- 'spec/features/projects/import_export/export_file_spec.rb'
- 'spec/helpers/markup_helper_spec.rb'
- 'spec/lib/banzai/filter/asset_proxy_filter_spec.rb'
- 'spec/lib/gitlab/asset_proxy_spec.rb'
- 'spec/lib/gitlab/auth/ip_rate_limiter_spec.rb'
- 'spec/lib/gitlab/git/hook_env_spec.rb'
- 'spec/lib/gitlab/github_import/markdown/attachment_spec.rb'
- 'spec/lib/gitlab/import_export/attribute_configuration_spec.rb'
- 'spec/lib/gitlab/import_export/references_configuration_spec.rb'
- 'spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb'
- 'spec/lib/gitlab/middleware/basic_health_check_spec.rb'
- 'spec/lib/gitlab/middleware/go_spec.rb'
- 'spec/lib/gitlab/sanitizers/exif_spec.rb'
- 'spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb'
- 'spec/models/application_setting_spec.rb'

View File

@ -593,7 +593,7 @@ gem 'ipaddress', '~> 0.8.3'
gem 'parslet', '~> 1.8'
gem 'ipynbdiff', path: 'vendor/gems/ipynbdiff'
gem 'ipynbdiff', path: 'gems/ipynbdiff', require: 'ipynb_diff'
gem 'ed25519', '~> 1.3.0'

View File

@ -34,6 +34,13 @@ PATH
nokogiri (~> 1.15.2)
rake (~> 13.0)
PATH
remote: gems/ipynbdiff
specs:
ipynbdiff (0.4.7)
diffy (~> 3.4)
oj (~> 3.13.16)
PATH
remote: vendor/gems/attr_encrypted
specs:
@ -68,13 +75,6 @@ PATH
error_tracking_open_api (1.0.0)
typhoeus (~> 1.0, >= 1.0.1)
PATH
remote: vendor/gems/ipynbdiff
specs:
ipynbdiff (0.4.7)
diffy (~> 3.4)
oj (~> 3.13.16)
PATH
remote: vendor/gems/mail-smtp_pool
specs:

View File

@ -122,14 +122,12 @@ export default {
name: '',
token: '',
url: '',
apiUrl: '',
},
activeTabIndex: this.tabIndex,
currentIntegration: null,
parsedPayload: [],
validationState: {
name: true,
apiUrl: true,
},
pricingLink: `${PROMO_URL}/pricing`,
};
@ -188,20 +186,14 @@ export default {
);
},
isFormDirty() {
const { type, active, name, apiUrl, payloadAlertFields = [], payloadAttributeMappings = [] } =
const { type, active, name, payloadAlertFields = [], payloadAttributeMappings = [] } =
this.currentIntegration || {};
const {
name: formName,
apiUrl: formApiUrl,
active: formActive,
type: formType,
} = this.integrationForm;
const { name: formName, active: formActive, type: formType } = this.integrationForm;
const isDirty =
type !== formType ||
active !== formActive ||
name !== formName ||
apiUrl !== formApiUrl ||
!isEqual(this.parsedPayload, payloadAlertFields) ||
!isEqual(this.mapping, this.getCleanMapping(payloadAttributeMappings));
@ -211,25 +203,19 @@ export default {
return this.isFormValid && this.isFormDirty;
},
dataForSave() {
const { name, apiUrl, active } = this.integrationForm;
const { name, active } = this.integrationForm;
const customMappingVariables = {
payloadAttributeMappings: this.mapping,
payloadExample: this.samplePayload.json || '{}',
};
const variables = this.isHttp
? { name, active, ...customMappingVariables }
: { apiUrl, active };
const variables = this.isHttp ? { name, active, ...customMappingVariables } : { active };
return { type: this.integrationForm.type, variables };
},
testAlertModal() {
return this.isFormDirty ? testAlertModalId : null;
},
prometheusUrlInvalidFeedback() {
const { blankUrlError, invalidUrlError } = i18n.integrationFormSteps.prometheusFormUrl;
return this.integrationForm.apiUrl?.length ? invalidUrlError : blankUrlError;
},
},
watch: {
tabIndex(val) {
@ -247,13 +233,12 @@ export default {
type,
active,
url,
apiUrl,
token,
payloadExample,
payloadAlertFields,
payloadAttributeMappings,
} = val;
this.integrationForm = { type, name, active, url, apiUrl, token };
this.integrationForm = { type, name, active, url, token };
if (this.showMappingBuilder) {
this.resetPayloadAndMappingConfirmed = false;
@ -271,14 +256,6 @@ export default {
validateName() {
this.validationState.name = Boolean(this.integrationForm.name?.length);
},
validateApiUrl() {
try {
const parsedUrl = new URL(this.integrationForm.apiUrl);
this.validationState.apiUrl = ['http:', 'https:'].includes(parsedUrl.protocol);
} catch (e) {
this.validationState.apiUrl = false;
}
},
isValidNonEmptyJSON(JSONString) {
if (JSONString) {
let parsed;
@ -298,14 +275,12 @@ export default {
},
triggerValidation() {
if (this.isHttp) {
this.validationState.apiUrl = true;
this.validateName();
if (!this.validationState.name) {
this.$refs.integrationName.$el.scrollIntoView({ behavior: 'smooth', block: 'center' });
}
} else if (this.isPrometheus) {
this.validationState.name = true;
this.validateApiUrl();
}
},
sendTestAlert() {
@ -332,7 +307,6 @@ export default {
this.integrationForm.type = integrationTypes.none.value;
this.integrationForm.name = '';
this.integrationForm.active = false;
this.integrationForm.apiUrl = '';
this.samplePayload = {
json: null,
error: null,
@ -490,28 +464,6 @@ export default {
class="gl-mt-4 gl-font-weight-normal"
/>
</gl-form-group>
<gl-form-group
v-if="isPrometheus"
class="gl-my-4"
:label="$options.i18n.integrationFormSteps.prometheusFormUrl.label"
label-for="api-url"
:invalid-feedback="prometheusUrlInvalidFeedback"
:state="validationState.apiUrl"
>
<gl-form-input
id="api-url"
v-model="integrationForm.apiUrl"
type="text"
:placeholder="$options.placeholders.prometheus"
data-qa-selector="prometheus_url_field"
@input="validateApiUrl"
/>
<span class="gl-text-gray-400">
{{ $options.i18n.integrationFormSteps.prometheusFormUrl.help }}
</span>
</gl-form-group>
<template v-if="showMappingBuilder">
<gl-form-group
data-testid="sample-payload-section"

View File

@ -65,12 +65,6 @@ export const i18n = {
proceedWithoutSave: s__('AlertSettings|Send without saving'),
cancel: __('Cancel'),
},
prometheusFormUrl: {
label: s__('AlertSettings|Prometheus API base URL'),
help: s__('AlertSettings|URL cannot be blank and must start with http: or https:.'),
blankUrlError: __('URL cannot be blank'),
invalidUrlError: __('URL is invalid'),
},
restKeyInfo: {
label: s__(
'AlertSettings|If you reset the authorization key for this project, you must update the key in every enabled alert source.',

View File

@ -5,5 +5,4 @@ fragment IntegrationItem on AlertManagementIntegration {
name
url
token
apiUrl
}

View File

@ -1,9 +1,7 @@
#import "../fragments/integration_item.fragment.graphql"
mutation createPrometheusIntegration($projectPath: ID!, $apiUrl: String!, $active: Boolean!) {
prometheusIntegrationCreate(
input: { projectPath: $projectPath, apiUrl: $apiUrl, active: $active }
) {
mutation createPrometheusIntegration($projectPath: ID!, $active: Boolean!) {
prometheusIntegrationCreate(input: { projectPath: $projectPath, active: $active }) {
errors
integration {
...IntegrationItem

View File

@ -4,9 +4,9 @@ import {
GlButton,
GlDropdown,
GlDropdownItem,
GlEmptyState,
GlFormGroup,
GlFormInputGroup,
GlSkeletonLoader,
GlModal,
GlModalDirective,
GlSprintf,
@ -17,7 +17,6 @@ import Api from '~/api';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import ManifestsList from '~/packages_and_registries/dependency_proxy/components/manifests_list.vue';
import { DEPENDENCY_PROXY_DOCS_PATH } from '~/packages_and_registries/settings/group/constants';
import { GRAPHQL_PAGE_SIZE } from '~/packages_and_registries/dependency_proxy/constants';
import getDependencyProxyDetailsQuery from '~/packages_and_registries/dependency_proxy/graphql/queries/get_dependency_proxy_details.query.graphql';
@ -28,7 +27,7 @@ export default {
GlButton,
GlDropdown,
GlDropdownItem,
GlEmptyState,
GlSkeletonLoader,
GlFormGroup,
GlFormInputGroup,
GlModal,
@ -41,13 +40,12 @@ export default {
GlModalDirective,
GlTooltip: GlTooltipDirective,
},
inject: ['groupPath', 'groupId', 'noManifestsIllustration', 'canClearCache', 'settingsPath'],
inject: ['groupPath', 'groupId', 'canClearCache', 'settingsPath'],
i18n: {
proxyImagePrefix: s__('DependencyProxy|Dependency Proxy image prefix'),
copyImagePrefixText: s__('DependencyProxy|Copy prefix'),
blobCountAndSize: s__('DependencyProxy|Contains %{count} blobs of images (%{size})'),
pageTitle: s__('DependencyProxy|Dependency Proxy'),
noManifestTitle: s__('DependencyProxy|There are no images in the cache'),
deleteCacheAlertMessageSuccess: s__(
'DependencyProxy|All items in the cache are scheduled for removal.',
),
@ -64,9 +62,6 @@ export default {
text: __('Cancel'),
},
},
links: {
DEPENDENCY_PROXY_DOCS_PATH,
},
data() {
return {
group: {},
@ -90,7 +85,7 @@ export default {
return this.group.dependencyProxyManifests?.pageInfo;
},
manifests() {
return this.group.dependencyProxyManifests?.nodes;
return this.group.dependencyProxyManifests?.nodes ?? [];
},
modalTitleWithCount() {
return sprintf(
@ -199,10 +194,18 @@ export default {
</template>
</title-area>
<gl-form-group v-if="showDependencyProxyImagePrefix" :label="$options.i18n.proxyImagePrefix">
<gl-skeleton-loader v-if="$apollo.queries.group.loading" />
<gl-form-group
v-if="showDependencyProxyImagePrefix"
:label="$options.i18n.proxyImagePrefix"
label-for="proxy-url"
>
<gl-form-input-group
id="proxy-url"
readonly
:value="group.dependencyProxyImagePrefix"
select-on-click
class="gl-layout-w-limited"
data-testid="proxy-url"
>
@ -224,7 +227,6 @@ export default {
</gl-form-group>
<manifests-list
v-if="manifests && manifests.length"
:dependency-proxy-image-prefix="dependencyProxyImagePrefix"
:loading="$apollo.queries.group.loading"
:manifests="manifests"
@ -233,12 +235,6 @@ export default {
@next-page="fetchNextPage"
/>
<gl-empty-state
v-else
:svg-path="noManifestsIllustration"
:title="$options.i18n.noManifestTitle"
/>
<gl-modal
:modal-id="$options.confirmClearCacheModal"
:title="modalTitleWithCount"

View File

@ -0,0 +1,80 @@
<script>
import { GlEmptyState, GlFormGroup, GlFormInputGroup, GlLink, GlSprintf } from '@gitlab/ui';
import { s__ } from '~/locale';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { DEPENDENCY_PROXY_HELP_PAGE_PATH } from '~/packages_and_registries/dependency_proxy/constants';
export default {
name: 'ManifestsEmptyState',
components: {
ClipboardButton,
GlEmptyState,
GlFormGroup,
GlFormInputGroup,
GlLink,
GlSprintf,
},
inject: ['noManifestsIllustration'],
i18n: {
codeExampleLabel: s__('DependencyProxy|Pull image by digest example'),
noManifestTitle: s__('DependencyProxy|There are no images in the cache'),
emptyText: s__(
'DependencyProxy|To store docker images in Dependency Proxy cache, pull an image by tag in your %{codeStart}.gitlab-ci.yml%{codeEnd} file. In this example, the image is %{codeStart}alpine:latest%{codeEnd}',
),
documentationText: s__(
'DependencyProxy|%{docLinkStart}See the documentation%{docLinkEnd} for other ways to store Docker images in Dependency Proxy cache.',
),
copyExample: s__('DependencyProxy|Copy example'),
},
// eslint-disable-next-line no-template-curly-in-string
codeExample: 'image: ${CI_DEPENDENCY_PROXY_DIRECT_GROUP_IMAGE_PREFIX}/alpine:latest',
links: {
DEPENDENCY_PROXY_HELP_PAGE_PATH,
},
};
</script>
<template>
<gl-empty-state :svg-path="noManifestsIllustration" :title="$options.i18n.noManifestTitle">
<template #description>
<p class="gl-mb-5">
<gl-sprintf :message="$options.i18n.emptyText">
<template #code="{ content }">
<code>{{ content }}</code>
</template>
</gl-sprintf>
</p>
<gl-form-group
class="gl-mb-5"
:label="$options.i18n.codeExampleLabel"
label-for="code-example"
label-sr-only
>
<gl-form-input-group
id="code-example"
readonly
:value="$options.codeExample"
class="gl-w-70p gl-mx-auto"
select-on-click
>
<template #append>
<clipboard-button
:text="$options.codeExample"
:title="$options.i18n.copyExample"
class="gl-m-0!"
/>
</template>
</gl-form-input-group>
</gl-form-group>
<p>
<gl-sprintf :message="$options.i18n.documentationText">
<template #docLink="{ content }">
<gl-link :href="$options.links.DEPENDENCY_PROXY_HELP_PAGE_PATH">{{ content }}</gl-link>
</template>
</gl-sprintf>
</p>
</template>
</gl-empty-state>
</template>

View File

@ -2,11 +2,13 @@
import { GlKeysetPagination, GlSkeletonLoader } from '@gitlab/ui';
import { s__ } from '~/locale';
import ManifestRow from '~/packages_and_registries/dependency_proxy/components/manifest_row.vue';
import ManifestsEmptyState from '~/packages_and_registries/dependency_proxy/components/manifests_empty_state.vue';
export default {
name: 'ManifestsLists',
components: {
ManifestRow,
ManifestsEmptyState,
GlKeysetPagination,
GlSkeletonLoader,
},
@ -18,7 +20,8 @@ export default {
},
pagination: {
type: Object,
required: true,
required: false,
default: () => ({}),
},
loading: {
type: Boolean,
@ -44,12 +47,18 @@ export default {
<template>
<div class="gl-mt-6">
<h3 class="gl-font-base">{{ $options.i18n.listTitle }}</h3>
<gl-skeleton-loader v-if="loading" />
<h3 class="gl-font-base gl-pb-3 gl-mb-0 gl-border-b-1 gl-border-gray-100 gl-border-b-solid">
{{ $options.i18n.listTitle }}
</h3>
<div v-if="loading" class="gl-py-3">
<gl-skeleton-loader />
</div>
<manifests-empty-state v-else-if="manifests.length === 0" />
<div v-else data-testid="main-area">
<div
class="gl-border-t-1 gl-border-gray-100 gl-border-t-solid gl-display-flex gl-flex-direction-column"
>
<div class="gl-display-flex gl-flex-direction-column">
<manifest-row
v-for="(manifest, index) in manifests"
:key="index"

View File

@ -1,2 +1,11 @@
import { helpPagePath } from '~/helpers/help_page_helper';
export const GRAPHQL_PAGE_SIZE = 20;
export const MANIFEST_PENDING_DESTRUCTION_STATUS = 'PENDING_DESTRUCTION';
export const DEPENDENCY_PROXY_HELP_PAGE_PATH = helpPagePath(
'user/packages/dependency_proxy/index',
{
anchor: 'store-a-docker-image-in-dependency-proxy-cache',
},
);

View File

@ -13,7 +13,7 @@ import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
import { __, sprintf } from '~/locale';
import CancelPipelineMutation from '~/pipelines/graphql/mutations/cancel_pipeline.mutation.graphql';
import RetryPipelineMutation from '~/pipelines/graphql/mutations/retry_pipeline.mutation.graphql';
import CiStatus from '~/vue_shared/components/ci_icon.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { reportToSentry } from '../../utils';
import { ACTION_FAILURE, DOWNSTREAM, UPSTREAM } from './constants';
@ -22,7 +22,7 @@ export default {
GlTooltip: GlTooltipDirective,
},
components: {
CiStatus,
CiIcon,
GlBadge,
GlButton,
GlLink,
@ -240,7 +240,7 @@ export default {
</gl-tooltip>
<div class="gl-bg-white gl-border gl-p-3 gl-rounded-lg gl-w-full" :class="cardClasses">
<div class="gl-display-flex gl-gap-x-3">
<ci-status v-if="!pipelineIsLoading" :status="pipelineStatus" :size="24" css-classes="" />
<ci-icon v-if="!pipelineIsLoading" :status="pipelineStatus" :size="24" />
<div v-else class="gl-pr-3"><gl-loading-icon size="sm" inline /></div>
<div
class="gl-display-flex gl-downstream-pipeline-job-width gl-flex-direction-column gl-line-height-normal"

View File

@ -159,7 +159,7 @@ class Admin::UsersController < Admin::ApplicationController
end
def unlock
if update_user(&:unlock_access!)
if unlock_user
redirect_back_or_admin_user(notice: _("Successfully unlocked"))
else
redirect_back_or_admin_user(alert: _("Error occurred. User was not unlocked"))
@ -401,6 +401,11 @@ class Admin::UsersController < Admin::ApplicationController
_("You cannot impersonate a user who cannot log in")
end
end
# method overridden in EE
def unlock_user
update_user(&:unlock_access!)
end
end
Admin::UsersController.prepend_mod_with('Admin::UsersController')

View File

@ -12,6 +12,7 @@ module IssuableActions
before_action :authorize_destroy_issuable!, only: :destroy
before_action :check_destroy_confirmation!, only: :destroy
before_action :authorize_admin_issuable!, only: :bulk_update
before_action :set_application_context!, only: :show
end
def show
@ -226,6 +227,10 @@ module IssuableActions
render_404 unless can?(current_user, :"update_#{resource_name}", issuable)
end
def set_application_context!
# no-op. The logic is defined in EE module.
end
def bulk_update_params
clean_bulk_update_params(
params.require(:update).permit(bulk_update_permitted_keys)

View File

@ -75,6 +75,8 @@ class Projects::MergeRequests::ConflictsController < Projects::MergeRequests::Ap
private
alias_method :issuable, :merge_request
def authorize_can_resolve_conflicts!
@conflicts_list = ::MergeRequests::Conflicts::ListService.new(@merge_request)

View File

@ -5,14 +5,6 @@ module NamespacesHelper
params.dig(:project, :namespace_id) || params[:namespace_id]
end
def namespace_icon(namespace, size = 40)
if namespace.is_a?(Group)
group_icon_url(namespace)
else
avatar_icon_for_user(namespace.owner, size)
end
end
def cascading_namespace_settings_popover_data(attribute, group, settings_path_helper)
locked_by_ancestor = group.namespace_settings.public_send("#{attribute}_locked_by_ancestor?") # rubocop:disable GitlabSecurity/PublicSend

View File

@ -2,12 +2,17 @@
module VulnerabilityFindingSignatureHelpers
extend ActiveSupport::Concern
# If the location object describes a physical location within a file
# (filename + line numbers), the 'location' algorithm_type should be used
# If the location object describes arbitrary data, then the 'hash'
# algorithm_type should be used.
ALGORITHM_TYPES = { hash: 1, location: 2, scope_offset: 3 }.with_indifferent_access.freeze
ALGORITHM_TYPES = {
hash: 1,
location: 2,
scope_offset: 3,
scope_offset_compressed: 4
}.with_indifferent_access.freeze
class_methods do
def priority(algorithm_type)

View File

@ -2067,6 +2067,13 @@ class User < ApplicationRecord
super
end
# override, from Devise
def unlock_access!(unlocked_by: self)
audit_unlock_access(author: unlocked_by)
super()
end
# Determine the maximum access level for a group of projects in bulk.
#
# Returns a Hash mapping project ID -> maximum access level.
@ -2593,6 +2600,9 @@ class User < ApplicationRecord
# method overridden in EE
def audit_lock_access; end
# method overridden in EE
def audit_unlock_access(author: self); end
end
User.prepend_mod_with('User')

View File

@ -1,11 +1,14 @@
- if session[:ask_for_usage_stats_consent]
= render Pajamas::AlertComponent.new(alert_options: { class: 'service-ping-consent-message' }) do |c|
- c.with_body do
- docs_link = link_to _('collect usage information'), help_page_path('user/admin_area/settings/usage_statistics.md'), class: 'gl-link'
- settings_link = link_to _('your settings'), metrics_and_profiling_admin_application_settings_path(anchor: 'js-usage-settings'), class: 'gl-link'
= s_('To help improve GitLab, we would like to periodically %{docs_link}. This can be changed at any time in %{settings_link}.').html_safe % { docs_link: docs_link, settings_link: settings_link }
- docs_link = link_to '', help_page_path('user/admin_area/settings/usage_statistics.md'), class: 'gl-link'
- settings_link = link_to '', metrics_and_profiling_admin_application_settings_path(anchor: 'js-usage-settings'), class: 'gl-link'
= safe_format s_('ServicePing|To help improve GitLab, we would like to periodically %{link_start}collect usage information%{link_end}.'), tag_pair(docs_link, :link_start, :link_end)
= safe_format s_('ServicePing|This can be changed at any time in %{link_start}your settings%{link_end}.'), tag_pair(settings_link, :link_start, :link_end)
- c.with_actions do
- send_service_data_path = admin_application_settings_path(application_setting: { version_check_enabled: 1, usage_ping_enabled: 1 })
- not_now_path = admin_application_settings_path(application_setting: { version_check_enabled: 0, usage_ping_enabled: 0 })
= link_to _("Send service data"), send_service_data_path, 'data-url' => admin_application_settings_path, method: :put, 'data-check-enabled': true, 'data-service-ping-enabled': true, class: 'js-service-ping-consent-action alert-link btn gl-button btn-confirm'
= link_to _("Don't send service data"), not_now_path, 'data-url' => admin_application_settings_path, method: :put, 'data-check-enabled': false, 'data-service-ping-enabled': false, class: 'js-service-ping-consent-action alert-link btn gl-button btn-default gl-ml-3'
= render Pajamas::ButtonComponent.new(href: send_service_data_path, method: :put, variant: :confirm, button_options: { 'data-url' => admin_application_settings_path, 'data-check-enabled': true, 'data-service-ping-enabled': true, class: 'js-service-ping-consent-action alert-link' }) do
= _('Send service data')
= render Pajamas::ButtonComponent.new(href: not_now_path, method: :put, button_options: { 'data-url' => admin_application_settings_path, 'data-check-enabled': false, 'data-service-ping-enabled': false, class: 'js-service-ping-consent-action alert-link gl-ml-3' }) do
= _("Don't send service data")

View File

@ -365,6 +365,7 @@ GitLab generates audit events when a cluster agent token is created or revoked.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/238177) in
GitLab 15.1, audit events when a user's two-factor authentication is disabled.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124169) in GitLab 16.2, audit events when a user's access is locked.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124973) in GitLab 16.2, audit events when a user's access is unlocked.
The following user actions on a GitLab instance generate instance audit events:
@ -378,6 +379,7 @@ The following user actions on a GitLab instance generate instance audit events:
- A user's personal access token was successfully or unsuccessfully created or revoked.
- A user's two-factor authentication was disabled.
- A user's access is locked.
- A user's access is unlocked.
#### User management

View File

@ -11,7 +11,7 @@ especially for actions that read or write to Git repositories. This information
helps benchmark file system performance against known good and bad real-world
systems.
Normally when talking about file system performance the biggest concern is
When talking about file system performance the biggest concern is
with Network File Systems (NFS). However, even some local disks can have slow
I/O. The information on this page can be used for either scenario.

View File

@ -1045,6 +1045,7 @@ Input type: `AdminSidekiqQueuesDeleteJobsInput`
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationadminsidekiqqueuesdeletejobsairesource"></a>`aiResource` | [`String`](#string) | Delete jobs matching ai_resource in the context metadata. |
| <a id="mutationadminsidekiqqueuesdeletejobsartifactsize"></a>`artifactSize` | [`String`](#string) | Delete jobs matching artifact_size in the context metadata. |
| <a id="mutationadminsidekiqqueuesdeletejobsartifactusedcdn"></a>`artifactUsedCdn` | [`String`](#string) | Delete jobs matching artifact_used_cdn in the context metadata. |
| <a id="mutationadminsidekiqqueuesdeletejobsartifactsdependenciescount"></a>`artifactsDependenciesCount` | [`String`](#string) | Delete jobs matching artifacts_dependencies_count in the context metadata. |

View File

@ -28,8 +28,8 @@ is recommended when [FIPS mode](../../development/fips_compliance.md) is enabled
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/225545) in GitLab 13.12.
Download a PyPI package file. The [simple API](#group-level-simple-api-entry-point)
normally supplies this URL.
Download a PyPI package file. The [simple API](#group-level-simple-api-entry-point)
usually supplies this URL.
```plaintext
GET groups/:id/-/packages/pypi/files/:sha256/:file_identifier
@ -142,7 +142,7 @@ This writes the downloaded file to `simple.html` in the current directory.
> Introduced in GitLab 12.10.
Download a PyPI package file. The [simple API](#project-level-simple-api-entry-point)
normally supplies this URL.
usually supplies this URL.
```plaintext
GET projects/:id/packages/pypi/files/:sha256/:file_identifier

View File

@ -215,7 +215,7 @@ end note
note top of "Load Balancer IP"
For local development,
it includes all local loopback interfaces
e.g. 127.0.0.1, 172.16.123.1, 192.168.0.1, etc.
for example, 127.0.0.1, 172.16.123.1, 192.168.0.1, etc.
end note
@enduml
@ -439,7 +439,7 @@ Stopped -up-> Starting : status=Starting
Terminated: Workspace has been deleted
Failed: Workspace is not ready due to\nvarious reasons(e.g. crashing container)
Failed: Workspace is not ready due to\nvarious reasons(for example, crashing container)
Failed -up-> Starting : status=Starting\n(container\nnot crashing)
Failed -right-> Stopped : status=Stopped
Failed -down-> Terminated : status=Terminated

View File

@ -232,12 +232,12 @@ coupled in the current implementation so we will break them out here to consider
them each separately.
- **Virtual Machine (VM) shape**. The underlying provider of a VM requires configuration to
know what kind of machine to create. E.g. Cores, memory, failure domain,
know what kind of machine to create. For example, Cores, memory, failure domain,
etc... This information is very provider specific.
- **VM lifecycle management**. Multiple machines will be created and a
system must keep track of which machines belong to this executor. Typically
a cloud provider will have a way to manage a set of homogeneous machines.
E.g. GCE Instance Group. The basic operations are increase, decrease and
For example, GCE Instance Group. The basic operations are increase, decrease and
usually delete a specific machine.
- **VM autoscaling**. In addition to low-level lifecycle management,
job-aware capacity decisions must be made to the set of machines to provide
@ -255,7 +255,7 @@ See also Glossary below.
#### Current state
The current architecture has several points of coupling between concerns.
Coupling reduces opportunities for abstraction (e.g. community supported
Coupling reduces opportunities for abstraction (for example, community supported
plugins) and increases complexity, making the code harder to understand,
test, maintain and extend.

View File

@ -238,7 +238,7 @@ The new workflow looks as follows:
1. Creates a new runner in the `ci_runners` table (and corresponding `glrt-` prefixed authentication token);
1. Presents the user with instructions on how to configure this new runner on a machine,
with possibilities for different supported deployment scenarios (e.g. shell, `docker-compose`, Helm chart, etc.)
with possibilities for different supported deployment scenarios (for example, shell, `docker-compose`, Helm chart, etc.)
This information contains a token which is available to the user only once, and the UI
makes it clear to the user that the value shall not be shown again, as registering the same runner multiple times
is discouraged (though not impossible).

View File

@ -31,7 +31,7 @@ to write such end-to-end tests, and how to set up GitLab CI/CD to automatically
against your new code, on a branch-by-branch basis. For the scope of this article, we will walk you
through the process of setting up GitLab CI/CD for end-to-end testing JavaScript-based applications
with WebdriverIO, but the general strategy should carry over to other languages.
We assume you are familiar with GitLab, [GitLab CI/CD](../../index.md), [Review Apps](../../review_apps/index.md), and running your app locally, e.g., on `localhost:8000`.
We assume you are familiar with GitLab, [GitLab CI/CD](../../index.md), [Review Apps](../../review_apps/index.md), and running your app locally, for example, on `localhost:8000`.
## What to test
@ -45,7 +45,7 @@ infrastructure is up and running, and that your units of code work well together
## Selenium and WebdriverIO
[Selenium](https://www.selenium.dev/) is a piece of software that can control web browsers, e.g., to make them
[Selenium](https://www.selenium.dev/) is a piece of software that can control web browsers, for example, to make them
visit a specific URL or interact with elements on the page. It can be programmatically controlled
from a variety of programming languages. In this article we're going to be using the
[WebdriverIO](http://v4.webdriver.io/) JavaScript bindings, but the general concept should carry over
@ -115,7 +115,7 @@ easiest way to get started is to start with
provides an overview of all available options. The two options that are going to be most relevant now are the
`specs` option, which is an array of paths to your tests, and the `baseUrl` option, which points to where your app is
running. And finally, we will need to tell WebdriverIO in which browsers we would like to run our
tests. This can be configured through the `capabilities` option, which is an array of browser names (e.g.
tests. This can be configured through the `capabilities` option, which is an array of browser names (for example,
`firefox` or `chrome`). It is recommended to install
[selenium-assistant](https://googlechromelabs.github.io/selenium-assistant/) to detect all installed
browsers:
@ -130,7 +130,7 @@ But of course, a simple configuration of `config.capabilities = ['firefox']` wou
If you've installed WebdriverIO as a dependency
(`npm install --save-dev webdriverio`), you can add a line to the `scripts` property in your
`package.json` that runs `wdio` with the path to your configuration file as value, e.g.:
`package.json` that runs `wdio` with the path to your configuration file as value, for example:
```javascript
"confidence-check": "wdio wdio.conf.js",

View File

@ -4790,6 +4790,6 @@ important to describe those, too. Think of things that may go wrong and include
This is important to minimize requests for support, and to avoid doc comments with
questions that you know someone might ask.
Each scenario can be a third-level heading, e.g. `### Getting error message X`.
Each scenario can be a third-level heading, for example, `### Getting error message X`.
If you have none to add when creating a doc, leave this section in place
but commented out to help encourage others to add to it in the future. -->

View File

@ -179,7 +179,7 @@ request that has an optional parameter:
optional :user_ids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The user ids for this rule'
```
Normally, a request to PUT `/test?user_ids` would cause Grape to pass
Usually, a request to PUT `/test?user_ids` would cause Grape to pass
`params` of `{ user_ids: nil }`.
This may introduce errors with endpoints that expect a blank array and

View File

@ -1011,7 +1011,7 @@ Puma. All these components should run as different system users to GitLab
(for example, `postgres`, `redis`, and `www-data`, instead of `git`).
As the `git` user it starts Sidekiq and Puma (a simple Ruby HTTP server
running on port `8080` by default). Under the GitLab user there are normally 4
running on port `8080` by default). Under the GitLab user there are usually 4
processes: `puma master` (1 process), `puma cluster worker`
(2 processes), `sidekiq` (1 process).
@ -1067,7 +1067,7 @@ Usage: /etc/init.d/postgresql {start|stop|restart|reload|force-reload|status} [v
GitLab (includes Puma and Sidekiq logs):
- `/home/git/gitlab/log/` contains `application.log`, `production.log`, `sidekiq.log`, `puma.stdout.log`, `git_json.log` and `puma.stderr.log` normally.
- `/home/git/gitlab/log/` usually contains `application.log`, `production.log`, `sidekiq.log`, `puma.stdout.log`, `git_json.log` and `puma.stderr.log`.
GitLab Shell:

View File

@ -143,7 +143,7 @@ user to the pipeline, for example.
### Template file location
Template files are normally stored as YAML files in `~/pipeline_wizard/templates/`.
Template files are usually stored as YAML files in `~/pipeline_wizard/templates/`.
The `PipelineWizard` component expects the `template` property as an un-parsed `String`,
and Webpack is configured to load `.yml` files from the above folder as strings.

View File

@ -31,7 +31,7 @@ We were using Overcommit prior to Lefthook, so you may want to uninstall it firs
### Install Lefthook
1. You can install lefthook in [different ways](https://github.com/evilmartians/lefthook/blob/master/docs/install.md#install-lefthook).
If you do not choose to install it globally (e.g. via Homebrew or package managers), and only want to use it for the GitLab project,
If you do not choose to install it globally (for example, via Homebrew or package managers), and only want to use it for the GitLab project,
you can install the Ruby gem via:
```shell

View File

@ -141,7 +141,7 @@ the key differences between these classes are listed in the table below.
| `insert_all!` | Attribute hashes | No | No | Yes | Yes |
To summarize, `BulkInsertSafe` moves bulk inserts closer to how ActiveRecord objects
and inserts would normally behave. However, if all you need is to insert raw data in bulk, then
and inserts would usually behave. However, if all you need is to insert raw data in bulk, then
`insert_all` is more efficient.
## Insert `has_many` associations in bulk

View File

@ -32,8 +32,8 @@ Migrations cannot mix **DDL** and **DML** changes as the application requires th
The DDL migrations are all migrations that:
1. Create or drop a table (for example, `create_table`).
1. Add or remove an index (for example, `add_index`, `add_index_concurrently`).
1. Add or remove a foreign key (for example `add_foreign_key`, `add_foreign_key_concurrently`).
1. Add or remove an index (for example, `add_index`, `add_concurrent_index`).
1. Add or remove a foreign key (for example `add_foreign_key`, `add_concurrent_foreign_key`).
1. Add or remove a column with or without a default value (for example, `add_column`).
1. Create or drop trigger functions (for example, `create_trigger_function`).
1. Attach or detach triggers from tables (for example, `track_record_deletions`, `untrack_record_deletions`).

View File

@ -33,7 +33,7 @@ release managers through the
Say you're using Chef for deploying new versions of GitLab and you'd like to run
post deployment migrations after deploying a new version. Let's assume you
normally use the command `chef-client` to do so. To make use of this feature
usually use the command `chef-client` to do so. To make use of this feature
you'd have to run this command as follows:
```shell
@ -63,7 +63,7 @@ behave exactly like regular Rails migrations.
Post deployment migrations can be used to perform migrations that mutate state
that an existing version of GitLab depends on. For example, say you want to
remove a column from a table. This requires downtime as a GitLab instance
depends on this column being present while it's running. Normally you'd follow
depends on this column being present while it's running. Usually you'd follow
these steps in such a case:
1. Stop the GitLab instance

View File

@ -244,7 +244,7 @@ Include in the MR description:
- Manually trigger the [database testing](database/database_migration_pipeline.md) job (`db:gitlabcom-database-testing`) in the `test` stage.
- If a single `update` is below than `1s` the query can be placed
directly in a regular migration (inside `db/migrate`).
- Background migrations are normally used, but not limited to:
- Background migrations are usually used, but not limited to:
- Migrating data in larger tables.
- Making numerous SQL queries per record in a dataset.
- Review queries (for example, make sure batch sizes are fine)

View File

@ -40,7 +40,7 @@ better understand the end-to-end path of a request through the system. When a re
process boundaries, the correlation ID is injected into the outgoing request. This enables
the propagation of the correlation ID to each downstream subsystem.
Correlation IDs are normally generated in the Rails application in response to
Correlation IDs are usually generated in the Rails application in response to
certain web requests. Some user facing systems don't generate correlation IDs in
response to user requests (for example, Git pushes over SSH).
@ -139,8 +139,8 @@ This can be shown by typing `p` `b` in the browser window.
Once the performance bar is enabled, select **Trace** in the performance bar to go to
the Jaeger UI.
The Jaeger search UI returns a query for the `Correlation-ID` of the current request. Normally,
this search should return a single trace result. Selecting this result shows the detail of the
The Jaeger search UI returns a query for the `Correlation-ID` of the current request.
This search should return a single trace result. Selecting this result shows the detail of the
trace in a hierarchical time-line.
![Jaeger Search UI](img/distributed_tracing_jaeger_ui.png)

View File

@ -223,7 +223,7 @@ merge it as early as possible.
### Linking to `/help`
When you're building a new feature, you may need to link to the documentation
from the GitLab application. This is normally done in files inside the
from the GitLab application. This is usually done in files inside the
`app/views/` directory, with the help of the `help_page_path` helper method.
The `help_page_path` contains the path to the document you want to link to,

View File

@ -222,7 +222,7 @@ This also applies to views.
#### Testing EE-only backend features
To test an EE class that doesn't exist in CE, create the spec file as you normally
To test an EE class that doesn't exist in CE, create the spec file as you usually
would in the `ee/spec` directory, but without the second `ee/` subdirectory.
For example, a class `ee/app/models/vulnerability.rb` would have its tests in `ee/spec/models/vulnerability_spec.rb`.
@ -303,7 +303,7 @@ This is also not just applied to models. Here's a list of other examples:
#### Testing EE features based on CE features
To test an `EE` namespaced module that extends a CE class with EE features,
create the spec file as you normally would in the `ee/spec` directory, including the second `ee/` subdirectory.
create the spec file as you usually would in the `ee/spec` directory, including the second `ee/` subdirectory.
For example, an extension `ee/app/models/ee/user.rb` would have its tests in `ee/spec/models/ee/user_spec.rb`.
In the `RSpec.describe` call, use the CE class name where the EE module would be used.
@ -713,7 +713,7 @@ end
### Code in `lib/`
Place EE-specific logic in the top-level `EE` module namespace. Namespace the
class beneath the `EE` module just as you would normally.
class beneath the `EE` module as you usually would.
For example, if CE has LDAP classes in `lib/gitlab/ldap/` then you would place
EE-specific LDAP classes in `ee/lib/ee/gitlab/ldap`.
@ -870,7 +870,7 @@ end
#### EE-specific behavior
Sometimes we need EE-specific behavior in some of the APIs. Normally we could
Sometimes we need EE-specific behavior in some of the APIs. Usually we could
use EE methods to override CE methods, however API routes are not methods and
therefore cannot be overridden. We need to extract them into a standalone
method, or introduce some "hooks" where we could inject behavior in the CE

View File

@ -11,7 +11,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
[Examples](https://gitlab.com/gitlab-org/growth/growth/-/wikis/GLEX-Framework-code-examples)
Start by generating a feature flag using the `bin/feature-flag` command as you
normally would for a development feature flag, making sure to use `experiment` for
usually would for a development feature flag, making sure to use `experiment` for
the type. For the sake of documentation let's name our feature flag (and experiment)
`pill_color`.
@ -280,7 +280,7 @@ about contexts now.
We can assume we run the experiment in one or a few places, but
track events potentially in many places. The tracking call remains the same, with
the arguments you would normally use when
the arguments you would usually use when
[tracking events using snowplow](../snowplow/index.md). The easiest example
of tracking an event in Ruby would be:

View File

@ -36,7 +36,7 @@ try {
} catch (e) {
if (e instanceof FooSyntaxError) {
// To handle a `FooSyntaxError`, we just need to instruct the user to change their input.
// This isn't unexpected, and is part of normal operations.
// This isn't unexpected, and is part of standard operations.
setUserMessage(`Try writing better code. ${e.message}`);
} else {
// We're not sure what `e` is, so something unexpected and bad happened...

View File

@ -284,7 +284,7 @@ To set this initial state, pass it as a parameter to your store's creation
function when mounting your Vue component:
```javascript
// in the Vue app's initialization script (e.g. mount_show.js)
// in the Vue app's initialization script (for example, mount_show.js)
import Vue from 'vue';
import Vuex from 'vuex';

View File

@ -149,7 +149,7 @@ created using the [Experiment Tracking template](https://gitlab.com/gitlab-org/g
`worker` feature flags are used for controlling Sidekiq workers behavior, such as deferring Sidekiq jobs.
`worker` feature flags likely do not have any YAML definition as the name could be dynamically generated using
the worker name itself, e.g. `run_sidekiq_jobs_AuthorizedProjectsWorker`. Some examples for using `worker` type feature
the worker name itself, for example, `run_sidekiq_jobs_AuthorizedProjectsWorker`. Some examples for using `worker` type feature
flags can be found in [deferring Sidekiq jobs](#deferring-sidekiq-jobs).
## Feature flag definition and validation
@ -348,7 +348,7 @@ Use the `push_frontend_feature_flag` method which is available to all controller
```ruby
before_action do
# Prefer to scope it per project or user e.g.
# Prefer to scope it per project or user, for example
push_frontend_feature_flag(:vim_bindings, project)
end

View File

@ -133,10 +133,10 @@ The following taxonomy chart shows the taxonomy and terminology of the various s
```mermaid
graph TD
CM[CommonMark - spec.txt - e.g. headings] --- GFMS[GFM Specification - spec.txt - e.g. strikethrough extension]
GFMS --- GLFM[GLFM Specification - e.g. color chips]
GFMS --- GFMI[GFM internal extensions - e.g. GitHub-specific references]
GLFM --- GLFS[GLFM internal extensions - e.g. GitLab-specific references]
CM[CommonMark - spec.txt - for example, headings] --- GFMS[GFM Specification - spec.txt - for example, strikethrough extension]
GFMS --- GLFM[GLFM Specification - for example, color chips]
GFMS --- GFMI[GFM internal extensions - for example, GitHub-specific references]
GLFM --- GLFS[GLFM internal extensions - for example, GitLab-specific references]
```
##### Official specifications

View File

@ -46,8 +46,9 @@ The `AttributeConfigurationSpec` checks and confirms the addition of new columns
<<-MSG
It looks like #{relation_class}, which is exported using the project Import/Export, has new attributes:
Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if you consider this can be exported.
Otherwise, please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its correspondent
Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if they can be exported.
Please denylist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its corresponding
model in the +excluded_attributes+ section.
SAFE_MODEL_ATTRIBUTES: #{File.expand_path(safe_attributes_file)}

View File

@ -229,13 +229,13 @@ end
Migrations like this are inherently risky and [additional actions](database_review.md#preparation-when-adding-data-migrations)
are required when preparing the migration for review.
## Atomicity
## Atomicity and transaction
By default, migrations are single transaction. That is, a transaction is opened
By default, migrations are a single transaction: it's opened
at the beginning of the migration, and committed after all steps are processed.
Running migrations in a single transaction makes sure that if one of the steps fails,
none of the steps are executed, leaving the database in valid state.
none of the steps are executed, leaving the database in a valid state.
Therefore, either:
- Put all migrations in one single-transaction migration.
@ -243,11 +243,141 @@ Therefore, either:
for the steps that cannot be done in a single transaction.
For example, if you create an empty table and need to build an index for it,
it is recommended to use a regular single-transaction migration and the default
you should use a regular single-transaction migration and the default
rails schema statement: [`add_index`](https://api.rubyonrails.org/classes/ActiveRecord/ConnectionAdapters/SchemaStatements.html#method-i-add_index).
This is a blocking operation, but it doesn't cause problems because the table is not yet used,
This operation is a blocking operation, but it doesn't cause problems because the table is not yet used,
and therefore it does not have any records yet.
NOTE:
Subtransactions are [disallowed](https://about.gitlab.com/blog/2021/09/29/why-we-spent-the-last-month-eliminating-postgresql-subtransactions/) in general.
Use multiple, separate transactions
if needed as described in [Heavy operations in a single transaction](#heavy-operations-in-a-single-transaction).
### Heavy operations in a single transaction
When using a single-transaction migration, a transaction holds a database connection
for the duration of the migration, so you must make sure the actions in the migration
do not take too much time.
In general, transactions must [execute quickly](database/transaction_guidelines.md#transaction-speed).
To that end, observe [the maximum query time limit](database/query_performance.md#timing-guidelines-for-queries)
for each query run in the migration.
If your single-transaction migration takes long to finish, you have several options.
In all cases, remember to select the appropriate migration type
depending on [how long a migration takes](#how-long-a-migration-should-take).
- Split the migration into **multiple single-transaction migrations**.
- Use **multiple transactions** by [using `disable_ddl_transaction!`](#disable-transaction-wrapped-migration).
- Keep using a single-transaction migration after **adjusting statement and lock timeout settings**.
If your heavy workload must use the guarantees of a transaction,
you should check your migration can execute without hitting the timeout limits.
The same advice applies to both single-transaction migrations and individual transactions.
- Statement timeout: the statement timeout is configured to be `15s` for GitLab.com's production database
but creating an index often takes more than 15 seconds.
When you use the existing helpers including `add_concurrent_index`,
they automatically turn off the statement timeout as needed.
In rare cases, you might need to set the timeout limit yourself by [using `disable_statement_timeout`](#temporarily-turn-off-the-statement-timeout-limit).
- Lock timeout: if your migration must execute as a transaction but can possibly time out while
acquiring a lock, [use `enable_lock_retries!`](#usage-with-transactional-migrations).
NOTE:
To run migrations, we directly connect to the primary database, bypassing PgBouncer
to control settings like `statement_timeout` and `lock_wait_timeout`.
#### Temporarily turn off the statement timeout limit
The migration helper `disable_statement_timeout` enables you to
temporarily set the statement timeout to `0` per transaction or per connection.
- You use the per-connection option when your statement does not support
running inside an explicit transaction, like `CREATE INDEX CONCURRENTLY`.
- If your statement does support an explicit transaction block,
like `ALTER TABLE ... VALIDATE CONSTRAINT`,
the per-transaction option should be used.
Using `disable_statement_timeout` is rarely needed, because
most migration helpers already use it internally when needed.
For example, creating an index usually takes more than 15 seconds,
which is the default statement timeout configured for GitLab.com's production database.
The helper `add_concurrent_index` creates an index inside the block
passed to `disable_statement_timeout` to disable the statement timeout per connection.
If you are writing raw SQL statements in a migration,
you may need to manually use `disable_statement_timeout`.
Consult the database reviewers and maintainers when you do.
### Disable transaction-wrapped migration
You can opt out of running your migration as a single transaction by using
`disable_ddl_transaction!`, an ActiveRecord method.
The method might be called in other database systems, with different results.
At GitLab we exclusively use PostgreSQL.
You should always read `disable_ddl_transaction!` as meaning:
"Do not execute this migration in a single PostgreSQL transaction. I'll open PostgreSQL transaction(s) only _when_ and _if_ I need them."
NOTE:
Even if you don't use an explicit PostgreSQL transaction `.transaction` (or `BEGIN; COMMIT;`),
every SQL statement is still executed as a transaction.
See [the PostgreSQL documentation on transactions](https://www.postgresql.org/docs/current/tutorial-transactions.html).
NOTE:
In GitLab, we've sometimes referred to
the migrations that used `disable_ddl_transaction!` as non-transactional migrations.
It just meant the migrations were not executed as _single_ transactions.
When should you use `disable_ddl_transaction!`? In most cases,
the existing RuboCop rules or migration helpers can detect if you should be
using `disable_ddl_transaction!`.
Skip `disable_ddl_transaction!` if you are unsure whether to use it or not in your migration,
and let the RuboCop rules and database reviews guide you.
Use `disable_ddl_transaction!` when PostgreSQL requires an operation to be executed outside an explicit transaction.
- The most prominent example of such operation is the command `CREATE INDEX CONCURRENTLY`.
PostgreSQL allows the blocking version (`CREATE INDEX`) to be run inside a transaction.
Unlike `CREATE INDEX`, `CREATE INDEX CONCURRENTLY` must be performed outside a transaction.
Therefore, even though a migration may run just one statement `CREATE INDEX CONCURRENTLY`,
you should use `disable_ddl_transaction!`.
It's also the reason why the use of the helper `add_concurrent_index` requires `disable_ddl_transaction!`.
`CREATE INDEX CONCURRENTLY` is more of the exception than the rule.
Use `disable_ddl_transaction!` when you need to run multiple transactions in a migration for any reason.
Most of the time you would be using multiple transactions to avoid [running one slow transaction](#heavy-operations-in-a-single-transaction).
- For example, when you insert, update, or delete (DML) a large amount of data,
you should [perform them in batches](database/iterating_tables_in_batches.md#eachbatch-in-data-migrations).
Should you need to group operations for each batch,
you can explicitly open a transaction block when processing a batch.
Consider using a [batched background migration](database/batched_background_migrations.md) for
any reasonably large workload.
Use `disable_ddl_transaction!` when migration helpers require them.
Various migration helpers need to run with `disable_ddl_transaction!`
because they require a precise control on when and how to open transactions.
- A foreign key _can_ be added inside a transaction, unlike `CREATE INDEX CONCURRENTLY`.
However, PostgreSQL does not provide an option similar to `CREATE INDEX CONCURRENTLY`.
The helper [`add_concurrent_foreign_key`](database/foreign_keys.md#adding-foreign-keys-in-migrations)
instead opens its own transactions to lock the source and target table
in a manner that minimizes locking while adding and validating the foreign key.
- As advised earlier, skip `disable_ddl_transaction!` if you are unsure
and see if any RuboCop check is violated.
Use `disable_ddl_transaction!` when your migration does not actually touch PostgreSQL databases
or does touch _multiple_ PostgreSQL databases.
- For example, your migration might target a Redis server. As a rule,
you cannot [interact with an external service](database/transaction_guidelines.md#dangerous-example-third-party-api-calls)
inside a PostgreSQL transaction.
- A transaction is used for a single database connection.
If your migrations are targeting multiple databases, such as both `ci` and `main` database,
follow [Migrations for multiple databases](database/migrations_for_multiple_databases.md).
## Naming conventions
Names for database objects (such as tables, indexes, and views) must be lowercase.
@ -290,19 +420,6 @@ minimum acceptable timestamp would be 20230424000000.
While the above should be considered a hard rule, it is a best practice to try to keep migration timestamps to within three weeks of the date it is anticipated that the migration will be merged upstream, regardless of how much time has elapsed since the last hard stop.
## Heavy operations in a single transaction
When using a single-transaction migration, a transaction holds a database connection
for the duration of the migration, so you must make sure the actions in the migration
do not take too much time: GitLab.com's production database has a `15s` timeout, so
in general, the cumulative execution time in a migration should aim to fit comfortably
in that limit. Singular query timings should fit within the [standard limit](database/query_performance.md#timing-guidelines-for-queries)
In case you need to insert, update, or delete a significant amount of data, you:
- Must disable the single transaction with `disable_ddl_transaction!`.
- Should consider doing it in a [batched background migration](database/batched_background_migrations.md).
## Migration helpers and versioning
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/339115) in GitLab 14.3.
@ -610,6 +727,71 @@ like a standard migration invocation.
The migration might fail if there is a very long running transaction (40+ minutes)
accessing the `users` table.
#### Lock-retry methodology at the SQL level
In this section, we provide a simplified SQL example that demonstrates the use of `lock_timeout`.
You can follow along by running the given snippets in multiple `psql` sessions.
When altering a table to add a column,
`AccessExclusiveLock`, which conflicts with most lock types, is required on the table.
If the target table is a very busy one, the transaction adding the column
may fail to acquire `AccessExclusiveLock` in a timely fashion.
Suppose a transaction is attempting to insert a row into a table:
```sql
-- Transaction 1
BEGIN;
INSERT INTO my_notes (id) VALUES (1);
```
At this point Transaction 1 acquired `RowExclusiveLock` on `my_notes`.
Transaction 1 could still execute more statements prior to committing or aborting.
There could be other similar, concurrent transactions that touch `my_notes`.
Suppose a transactional migration is attempting to add a column to the table
without using any lock retry helper:
```sql
-- Transaction 2
BEGIN;
ALTER TABLE my_notes ADD COLUMN title text;
```
Transaction 2 is now blocked because it cannot acquire
`AccessExclusiveLock` on `my_notes` table
as Transaction 1 is still executing and holding the `RowExclusiveLock`
on `my_notes`.
A more pernicious effect is blocking the transactions that would
normally not conflict with Transaction 1 because Transaction 2
is queueing to acquire `AccessExclusiveLock`.
In a normal situation, if another transaction attempted to read from and write
to the same table `my_notes` at the same time as Transaction 1,
the transaction would go through
since the locks needed for reading and writing would not
conflict with `RowExclusiveLock` held by Transaction 1.
However, when the request to acquire `AccessExclusiveLock` is queued,
the subsequent requests for conflicting locks on the table would block although
they could be executed concurrently alongside Transaction 1.
If we used `with_lock_retries`, Transaction 2 would instead quickly
timeout after failing to acquire the lock within the specified time period
and allow other transactions to proceed:
```sql
-- Transaction 2 (version with lock timeout)
BEGIN;
SET LOCAL lock_timeout to '100ms'; -- added by the lock retry helper.
ALTER TABLE my_notes ADD COLUMN title text;
```
The lock retry helper would repeatedly try the same transaction
at different time intervals until it succeeded.
Note that `SET LOCAL` scopes the parameter (`lock_timeout`) change to
the transaction.
## Removing indexes
If the table is not empty when removing an index, make sure to use the method

View File

@ -48,11 +48,11 @@ However, even though the actual handling of the request interception and
modal is transparent, without any mandatory changes to the involved JavaScript or Vue components
for the form or page, changes in request or error handling may be required. Changes are needed
because the existing behavior may not work correctly: for example, if a failed or cancelled
CAPTCHA display interrupts the normal request flow or UI updates.
CAPTCHA display interrupts the standard request flow or UI updates.
Careful exploratory testing of all scenarios is important to uncover any potential
problems.
This sequence diagram illustrates the normal CAPTCHA flow for JavaScript XHR/Fetch requests
This sequence diagram illustrates the standard CAPTCHA flow for JavaScript XHR/Fetch requests
on the frontend:
```mermaid
@ -73,7 +73,7 @@ sequenceDiagram
```
The backend is also cleanly abstracted via mixin modules and helper methods. The three main
changes required to the relevant backend controller actions (normally just `create`/`update`) are:
changes required to the relevant backend controller actions (typically just `create`/`update`) are:
1. Pass `perform_spam_check: true` to the Update Service class constructor.
It is set to `true` by default in the Create Service.
@ -86,7 +86,7 @@ changes required to the relevant backend controller actions (normally just `crea
1. Checking if the model contains an error, and the `needs_recaptcha` flag is true.
- If yes: Add the appropriate spam or CAPTCHA fields to the JSON response, and return
a `409 - Conflict` HTTP status code.
- If no (if CAPTCHA is disabled or if no spam was detected): The normal request return
- If no (if CAPTCHA is disabled or if no spam was detected): The standard request return
logic passed in the block is run.
Thanks to the abstractions, it's more straightforward to implement than it is to explain it.

View File

@ -540,7 +540,7 @@ describe('when logged in', () => {
### Ensuring that tests are isolated
Tests are normally architected in a pattern which requires a recurring setup of the component under test. This is often achieved by making use of the `beforeEach` hook.
Tests are typically architected in a pattern which requires a recurring setup of the component under test. This is often achieved by making use of the `beforeEach` hook.
Example
@ -1225,7 +1225,7 @@ You can download any older version of Firefox from the releases FTP server, <htt
1. Rename the application to something like `Firefox_Old`.
1. Move the application to the `Applications` folder.
1. Open up a terminal and run `/Applications/Firefox_Old.app/Contents/MacOS/firefox-bin -profilemanager` to create a new profile specific to that Firefox version.
1. Once the profile has been created, quit the app, and run it again like normal. You now have a working older Firefox version.
1. Once the profile has been created, quit the app, and run it again like usual. You now have a working older Firefox version.
## Snapshots
@ -1742,7 +1742,7 @@ If you are stubbing an `ee` feature flag, then use:
You can run your spec with the prefix `WEBDRIVER_HEADLESS=0` to open an actual browser. However, the specs go through the commands quickly and leave you no time to look around.
To avoid this problem, you can write `binding.pry` on the line where you want Capybara to stop execution. You are then inside the browser with normal usage. To understand why you cannot find certain elements, you can:
To avoid this problem, you can write `binding.pry` on the line where you want Capybara to stop execution. You are then inside the browser with standard usage. To understand why you cannot find certain elements, you can:
- Select elements.
- Use the console and network tab.

View File

@ -37,7 +37,7 @@ Sec-WebSocket-Protocol: terminal.gitlab.com
```
At this point, the connection is still HTTP, so this is a request.
The server can send a normal HTTP response, such as `404 Not Found` or
The server can send a standard HTTP response, such as `404 Not Found` or
`500 Internal Server Error`.
If the server decides to permit the upgrade, it sends an HTTP
@ -116,7 +116,7 @@ contain ANSI terminal control codes, and may be in any encoding.
## Workhorse to GitLab
Using the terminal as an example, before upgrading the browser,
Workhorse sends a normal HTTP request to GitLab on a URL like
Workhorse sends a standard HTTP request to GitLab on a URL like
`https://gitlab.com/group/project/environments/1/terminal.ws/authorize`.
This returns a JSON response containing details of where the
terminal can be found, and how to connect it. In particular,

View File

@ -186,7 +186,7 @@ If EKS node autoscaling is employed, it is likely that your average loading will
- [3K AutoScale from 25% GPT Test Results](https://gitlab.com/guided-explorations/aws/implementation-patterns/gitlab-cloud-native-hybrid-on-eks/-/blob/master/gitlab-alliances-testing/3K/3k-QuickStart-AutoScale-ARM-RDS-Cache_v13-12-3-ee_2021-07-23_194200/3k-QuickStart-AutoScale-ARM-RDS-Cache_v13-12-3-ee_2021-07-23_194200_results.txt)
Elastic Auto Scale GPT Test Results start with an idle scaled cluster and then start the standard GPT test to determine if the EKS Auto Scaler performs well enough to keep up with performance test demands. In general this is substantially harder ramping than the scaling required when the ramping is driven by normal production workloads.
Elastic Auto Scale GPT Test Results start with an idle scaled cluster and then start the standard GPT test to determine if the EKS Auto Scaler performs well enough to keep up with performance test demands. In general this is substantially harder ramping than the scaling required when the ramping is driven by standard production workloads.
**Deploy Now**
@ -240,7 +240,7 @@ If EKS node autoscaling is employed, it is likely that your average loading will
- [5K AutoScale from 25% GPT Test Results](https://gitlab.com/guided-explorations/aws/implementation-patterns/gitlab-cloud-native-hybrid-on-eks/-/blob/master/gitlab-alliances-testing/5K/5k-QuickStart-AutoScale-From-25Percent-ARM-RDS-Redis_v13-12-3-ee_2021-07-24_102717/5k-QuickStart-AutoScale-From-25Percent-ARM-RDS-Redis_v13-12-3-ee_2021-07-24_102717_results.txt)
Elastic Auto Scale GPT Test Results start with an idle scaled cluster and then start the standard GPT test to determine if the EKS Auto Scaler performs well enough to keep up with performance test demands. In general this is substantially harder ramping than the scaling required when the ramping is driven by normal production workloads.
Elastic Auto Scale GPT Test Results start with an idle scaled cluster and then start the standard GPT test to determine if the EKS Auto Scaler performs well enough to keep up with performance test demands. In general this is substantially harder ramping than the scaling required when the ramping is driven by standard production workloads.
**Deploy Now**
@ -294,7 +294,7 @@ If EKS node autoscaling is employed, it is likely that your average loading will
- [10K Elastic Auto Scale GPT Test Results](https://gitlab.com/guided-explorations/aws/implementation-patterns/gitlab-cloud-native-hybrid-on-eks/-/blob/master/gitlab-alliances-testing/10K/GL-CloudNative-10k-AutoScaling-Test_v13-12-3-ee_2021-07-09_115139/GL-CloudNative-10k-AutoScaling-Test_v13-12-3-ee_2021-07-09_115139_results.txt)
Elastic Auto Scale GPT Test Results start with an idle scaled cluster and then start the standard GPT test to determine if the EKS Auto Scaler performs well enough to keep up with performance test demands. In general this is substantially harder ramping than the scaling required when the ramping is driven by normal production workloads.
Elastic Auto Scale GPT Test Results start with an idle scaled cluster and then start the standard GPT test to determine if the EKS Auto Scaler performs well enough to keep up with performance test demands. In general this is substantially harder ramping than the scaling required when the ramping is driven by standard production workloads.
**Deploy Now**
@ -347,7 +347,7 @@ If EKS node autoscaling is employed, it is likely that your average loading will
- [50K Elastic Auto Scale GPT Test Results](https://gitlab.com/guided-explorations/aws/implementation-patterns/gitlab-cloud-native-hybrid-on-eks/-/blob/master/gitlab-alliances-testing/50K/50k-AutoScale-Test_v13-12-3-ee_2021-08-13_192633/50k-AutoScale-Test_v13-12-3-ee_2021-08-13_192633.txt)
Elastic Auto Scale GPT Test Results start with an idle scaled cluster and then start the standard GPT test to determine if the EKS Auto Scaler performs well enough to keep up with performance test demands. In general this is substantially harder ramping than the scaling required when the ramping is driven by normal production workloads.
Elastic Auto Scale GPT Test Results start with an idle scaled cluster and then start the standard GPT test to determine if the EKS Auto Scaler performs well enough to keep up with performance test demands. In general this is substantially harder ramping than the scaling required when the ramping is driven by standard production workloads.
**Deploy Now**

View File

@ -9,7 +9,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
## Description
A `Content-Security-Policy-Report-Only` (CSPRO) was identified on the target site. CSP-Report-Only headers
aid in determining how to implement a `Content-Security-Policy` that does not disrupt normal use of the target
aid in determining how to implement a `Content-Security-Policy` that does not disrupt use of the target
site.
## Remediation

View File

@ -161,10 +161,6 @@ Prerequisites:
To close an epic, at the top of an epic, select **Close epic**.
<!-- Delete when the `moved_mr_sidebar` feature flag is removed -->
If you don't see this action at the top of an epic, your project or instance might have
enabled a feature flag for [moved actions](../../project/merge_requests/index.md#move-sidebar-actions)
You can also use the `/close` [quick action](../../project/quick_actions.md).
## Reopen a closed epic

View File

@ -215,10 +215,6 @@ To close an issue, you can either:
1. Select **Plan > Issues**, then select your issue to view it.
1. At the top of the issue, select **Close issue**.
<!-- Delete when the `moved_mr_sidebar` feature flag is removed -->
If you don't see this action at the top of an issue, your project or instance might have
enabled a feature flag for [moved actions](../merge_requests/index.md#move-sidebar-actions).
### Reopen a closed issue
Prerequisites:

View File

@ -232,7 +232,7 @@ These examples use regex (regular expressions) string boundary characters to mat
the beginning of a string (`^`), and its end (`$`). They also include instances
where either the directory path or the filename can include `.` or `/`. Both of
these special regex characters must be escaped with a backslash `\\` if you want
to use them as normal characters in a match condition.
to use them as standard characters in a match condition.
- **Prevent pushing `.exe` files to any location in the repository** - This regex
matches any filename that contains `.exe` at the end:

View File

@ -1,2 +1,3 @@
*.gem
coverage
.bundle

View File

@ -0,0 +1,4 @@
include:
- local: gems/gem.gitlab-ci.yml
inputs:
gem_name: "ipynbdiff"

View File

@ -0,0 +1,11 @@
inherit_from:
- ../config/rubocop.yml
CodeReuse/ActiveRecord:
Enabled: false
Naming/FileName:
Exclude:
- spec/**/*.rb
- lib/gitlab/rspec.rb
- lib/gitlab/rspec/all.rb

View File

@ -1,7 +1,7 @@
PATH
remote: .
specs:
ipynbdiff (0.4.7)
gitlab-ipynbdiff (0.4.7)
diffy (~> 3.4)
oj (~> 3.13.16)
@ -15,9 +15,10 @@ GEM
coderay (1.1.3)
diff-lcs (1.5.0)
diffy (3.4.2)
docile (1.4.0)
memory_profiler (1.0.0)
method_source (1.0.0)
oj (3.13.16)
oj (3.13.23)
parser (3.1.2.0)
ast (~> 2.4.1)
proc_to_ast (0.1.0)
@ -47,6 +48,12 @@ GEM
rspec (>= 2.13, < 4)
unparser
rspec-support (3.11.0)
simplecov (0.22.0)
docile (~> 1.1)
simplecov-html (~> 0.11)
simplecov_json_formatter (~> 0.1)
simplecov-html (0.12.3)
simplecov_json_formatter (0.1.4)
unparser (0.6.5)
diff-lcs (~> 1.3)
parser (>= 3.1.0)
@ -57,11 +64,12 @@ PLATFORMS
DEPENDENCIES
benchmark-memory (~> 0.2.0)
bundler (~> 2.2)
ipynbdiff!
gitlab-ipynbdiff!
pry (~> 0.14)
rake (~> 13.0)
rspec (~> 3.10)
rspec-parameterized (~> 0.5.1)
simplecov
BUNDLED WITH
2.3.16

View File

@ -1,34 +1,32 @@
# frozen_string_literal: true
lib = File.expand_path('lib/..', __dir__)
$LOAD_PATH.unshift lib unless $LOAD_PATH.include?(lib)
$LOAD_PATH.push File.expand_path('lib', __dir__ || '')
require 'lib/version'
require_relative 'lib/ipynb_diff/version'
Gem::Specification.new do |s|
s.name = 'ipynbdiff'
s.version = IpynbDiff::VERSION
s.version = IpynbDiff::Version::VERSION
s.summary = 'Human Readable diffs for Jupyter Notebooks'
s.description = 'Better diff for Jupyter Notebooks by first preprocessing them and removing clutter'
s.authors = ['Eduardo Bonet']
s.email = 'ebonet@gitlab.com'
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
s.files = Dir.glob("lib/**/*.*")
s.test_files = Dir.glob("spec/**/*.*")
s.homepage =
'https://gitlab.com/gitlab-org/incubation-engineering/mlops/rb-ipynbdiff'
s.license = 'MIT'
s.require_paths = ['lib']
s.files = Dir['lib/**/*.rb']
s.require_paths = ["lib"]
s.required_ruby_version = ">= 3.0"
s.homepage = 'https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/ipynbdiff'
s.license = 'MIT'
s.add_runtime_dependency 'diffy', '~> 3.4'
s.add_runtime_dependency 'oj', '~> 3.13.16'
s.add_development_dependency 'benchmark-memory', '~>0.2.0'
s.add_development_dependency 'bundler', '~> 2.2'
s.add_development_dependency 'pry', '~> 0.14'
s.add_development_dependency 'rake', '~> 13.0'
s.add_development_dependency 'rspec', '~> 3.10'
s.add_development_dependency 'rspec-parameterized', '~> 0.5.1'
s.add_development_dependency 'benchmark-memory', '~>0.2.0'
s.add_development_dependency 'simplecov', '~> 0.12.0'
end

View File

@ -1,10 +1,11 @@
# frozen_string_literal: true
require 'ipynb_diff/transformer'
require 'ipynb_diff/diff'
require 'ipynb_diff/symbol_map'
# Human Readable Jupyter Diffs
module IpynbDiff
require 'transformer'
require 'diff'
def self.diff(from, to, raise_if_invalid_nb: false, include_frontmatter: false, hide_images: false, diffy_opts: {})
transformer = Transformer.new(include_frontmatter: include_frontmatter, hide_images: hide_images)

View File

@ -1,9 +1,10 @@
# frozen_string_literal: true
require 'ipynb_diff/symbolized_markdown_helper'
module IpynbDiff
# Transforms Jupyter output data into markdown
class OutputTransformer
require 'symbolized_markdown_helper'
include SymbolizedMarkdownHelper
HIDDEN_IMAGE_OUTPUT = ' [Hidden Image Output]'
@ -32,7 +33,7 @@ module IpynbDiff
def transform_error(traceback, symbol)
traceback.map.with_index do |t, idx|
t.split("\n").map do |l|
_(symbol / idx, l.gsub(/\[[0-9][0-9;]*m/, '').sub("\u001B", ' ').gsub(/\u001B/, '').rstrip)
___(symbol / idx, l.gsub(/\[[0-9][0-9;]*m/, '').sub("\u001B", ' ').delete("\u001B").rstrip)
end
end
end
@ -47,22 +48,22 @@ module IpynbDiff
new_symbol = symbol_prefix / output_type
case output_type
when 'image/png', 'image/jpeg'
transform_image(output_type + ';base64', output_element, new_symbol)
transform_image("#{output_type};base64", output_element, new_symbol)
when 'image/svg+xml'
transform_image(output_type + ';utf8', output_element, new_symbol)
transform_image("#{output_type};utf8", output_element, new_symbol)
when 'text/markdown', 'text/latex', 'text/plain', 'text'
transform_text(output_element, new_symbol)
end
end
def transform_image(image_type, image_content, symbol)
return _(nil, HIDDEN_IMAGE_OUTPUT) if @hide_images
return ___(nil, HIDDEN_IMAGE_OUTPUT) if @hide_images
lines = image_content.is_a?(Array) ? image_content : [image_content]
single_line = lines.map(&:strip).join.gsub(/\s+/, ' ')
_(symbol, " ![](data:#{image_type},#{single_line})")
___(symbol, " ![](data:#{image_type},#{single_line})")
end
def transform_text(text_content, symbol)

View File

@ -36,6 +36,7 @@ module IpynbDiff
# .obj1.2.obj3.obj4 -> 9
#
class SymbolMap
# rubocop:disable Lint/UnusedMethodArgument
class << self
def handler
@handler ||= SymbolMap.new
@ -87,7 +88,7 @@ module IpynbDiff
if key.nil? # value in an array
if @current_path.empty?
@current_path = ['']
return nil
return
end
symbol = @current_array_index.last
@ -103,5 +104,6 @@ module IpynbDiff
@symbols = {}
@current_array_index = []
end
# rubocop:enable Lint/UnusedMethodArgument
end
end

View File

@ -3,16 +3,15 @@
module IpynbDiff
# Helper functions
module SymbolizedMarkdownHelper
def _(symbol = nil, content = '')
def ___(symbol = nil, content = '')
{ symbol: symbol, content: content }
end
def symbolize_array(symbol, content, &block)
def symbolize_array(symbol, content)
if content.is_a?(Array)
content.map.with_index { |l, idx| _(symbol / idx, block.call(l)) }
content.map.with_index { |l, idx| ___(symbol / idx, yield(l)) }
else
content.split("\n").map { |c| _(symbol, c) }
content.split("\n").map { |c| ___(symbol, c) }
end
end
end

View File

@ -1,19 +1,18 @@
# frozen_string_literal: true
module IpynbDiff
require 'oj'
require 'json'
require 'yaml'
require 'ipynb_diff/output_transformer'
require 'ipynb_diff/symbolized_markdown_helper'
require 'ipynb_diff/symbol_map'
require 'ipynb_diff/transformed_notebook'
require 'oj'
class InvalidNotebookError < StandardError
end
module IpynbDiff
InvalidNotebookError = Class.new(StandardError)
# Returns a markdown version of the Jupyter Notebook
class Transformer
require 'json'
require 'yaml'
require 'output_transformer'
require 'symbolized_markdown_helper'
require 'symbol_map'
require 'transformed_notebook'
include SymbolizedMarkdownHelper
@include_frontmatter = true
@ -60,10 +59,10 @@ module IpynbDiff
type = cell['cell_type'] || 'raw'
[
_(symbol, %(%% Cell type:#{type} id:#{cell['id']} tags:#{tags&.join(',')})),
_,
___(symbol, %(%% Cell type:#{type} id:#{cell['id']} tags:#{tags&.join(',')})),
___,
rows,
_
___
]
end
@ -73,9 +72,9 @@ module IpynbDiff
def transform_code_cell(cell, notebook, symbol)
[
_(symbol / 'source', %(``` #{notebook.dig('metadata', 'kernelspec', 'language') || ''})),
___(symbol / 'source', %(``` #{notebook.dig('metadata', 'kernelspec', 'language') || ''})),
symbolize_array(symbol / 'source', cell['source'], &:rstrip),
_(nil, '```'),
___(nil, '```'),
transform_outputs(cell['outputs'], symbol)
]
end
@ -84,10 +83,10 @@ module IpynbDiff
transformed = outputs.map
.with_index { |output, i| @out_transformer.transform(output, symbol / ['outputs', i]) }
.compact
.map { |el| [_, el] }
.map { |el| [___, el] }
[
transformed.empty? ? [] : [_, _(symbol / 'outputs', '%% Output')],
transformed.empty? ? [] : [___, ___(symbol / 'outputs', '%% Output')],
transformed
]
end
@ -106,7 +105,7 @@ module IpynbDiff
}
}.to_yaml
as_yaml.split("\n").map { |l| _(nil, l) }.append(_(nil, '---'), _)
as_yaml.split("\n").map { |l| ___(nil, l) }.append(___(nil, '---'), ___)
end
end
end

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
module IpynbDiff
VERSION = '0.4.7'
module Version
VERSION = '0.4.7'
end
end

View File

@ -1,8 +1,11 @@
# frozen_string_literal: true
require 'ipynbdiff'
require 'benchmark'
require 'benchmark/memory'
require_relative 'test_helper'
# rubocop:disable Layout/LineLength
large_cell = '{
"cell_type": "code",
"execution_count": 9,
@ -26,6 +29,7 @@ large_cell = '{
"do_plot(is_sin = False)"
]
},'
# rubocop:enable Layout/LineLength
base = '{
"cells": [

View File

@ -0,0 +1,55 @@
# frozen_string_literal: true
require_relative '../test_helper'
# Specs for IpynbDiff::SymbolMap: parses a pretty-printed JSON document into
# a map from JSON "symbol path" (for example `.obj1.obj2.0`) to the line
# number where that path appears in the pretty-printed output.
describe IpynbDiff::SymbolMap do
  # Builds the expected path-to-line hash from [path, line] pairs.
  # NOTE(review): `cases` comes from a splat and is never nil, so the `&.`
  # and the `|| []` fallback look like dead code — confirm before removing.
  def res(*cases)
    cases&.to_h || []
  end

  describe '.parse' do
    # Pretty-generate the JSON so the expected line numbers are deterministic.
    subject { described_class.parse(JSON.pretty_generate(source)) }

    context 'when object has blank key' do
      let(:source) { { "": { "": 5 } } }

      it { is_expected.to match_array(res([".", 2], ["..", 3])) }
    end

    context 'when object is empty' do
      let(:source) { {} }

      it { is_expected.to be_empty }
    end

    context 'when object is empty array' do
      let(:source) { [] }

      it { is_expected.to be_empty }
    end

    context 'when object has inner object and number' do
      let(:source) { { obj1: { obj2: 1 } } }

      it { is_expected.to match_array(res(['.obj1', 2], ['.obj1.obj2', 3])) }
    end

    context 'when object has inner object and number, string and array with object' do
      let(:source) { { obj1: { obj2: [123, 2, true], obj3: "hel\nlo", obj4: true, obj5: 123, obj6: 'a' } } }

      it do
        # Line numbers refer to positions in the pretty-generated JSON, where
        # each key and each array element occupies its own line.
        is_expected.to match_array(
          res(['.obj1', 2],
              ['.obj1.obj2', 3],
              ['.obj1.obj2.0', 4],
              ['.obj1.obj2.1', 5],
              ['.obj1.obj2.2', 6],
              ['.obj1.obj3', 8],
              ['.obj1.obj4', 9],
              ['.obj1.obj5', 10],
              ['.obj1.obj6', 11])
        )
      end
    end
  end
end

View File

@ -0,0 +1,94 @@
# frozen_string_literal: true
require_relative '../test_helper'
# Specs for IpynbDiff::Transformer, which turns a Jupyter notebook (JSON)
# into a markdown representation carrying per-line source symbols and
# source line numbers.
describe IpynbDiff::Transformer do
  describe '.transform' do
    using RSpec::Parameterized::TableSyntax

    let!(:default_config) { { include_frontmatter: false, hide_images: false } }
    # Each test-case directory holds the input notebook plus the expected
    # markdown, symbols and line numbers (see read_test_case in test_helper).
    let(:test_case) { read_test_case(test_case_name) }
    # Fall back to the shared from.ipynb fixture when a case has no input.
    let(:notebook) { test_case[:input] || FROM_IPYNB }
    let(:config) { {} }

    subject { described_class.new(**default_config.merge(config)).transform(notebook) }

    # ctx is a human-readable label; test_case_name selects the fixture dir;
    # config overrides default_config for that row.
    where(:ctx, :test_case_name, :config) do
      'renders metadata' | 'no_cells' | { include_frontmatter: true }
      'is empty for no cells, but metadata is false' | 'no_cells_no_metadata' | {}
      'adds markdown cell' | 'only_md' | {}
      'adds block with only one line of markdown' | 'single_line_md' | {}
      'adds raw block' | 'only_raw' | {}
      'code cell, but no output' | 'only_code' | {}
      'code cell, but no language' | 'only_code_no_language' | {}
      'code cell, but no kernelspec' | 'only_code_no_kernelspec' | {}
      'code cell, but no nb metadata' | 'only_code_no_metadata' | {}
      'text output' | 'text_output' | {}
      'ignores html output' | 'ignore_html_output' | {}
      'extracts png output along with text' | 'text_png_output' | {}
      'embeds svg as image' | 'svg' | {}
      'extracts latex output' | 'latex_output' | {}
      'extracts error output' | 'error_output' | {}
      'does not fetch tags if there is no cell metadata' | 'no_metadata_on_cell' | {}
      'generates :percent decorator' | 'percent_decorator' | {}
      'parses stream output' | 'stream_text' | {}
      'ignores unknown output type' | 'unknown_output_type' | {}
      'handles backslash correctly' | 'backslash_as_last_char' | {}
      'multiline png output' | 'multiline_png_output' | {}
      'hides images when option passed' | 'hide_images' | { hide_images: true }
      '\n within source lines' | 'source_with_linebreak' | { hide_images: true }
    end

    with_them do
      it 'generates the expected markdown' do
        expect(subject.as_text).to eq test_case[:expected_markdown]
      end

      it 'marks the lines correctly' do
        # One source symbol per transformed block, compared as a newline-joined
        # string against the expected_symbols fixture.
        blocks = subject.blocks.map { |b| b[:source_symbol] }.join("\n")

        expect(blocks).to eq test_case[:expected_symbols]
      end
    end

    describe 'Source line map' do
      let(:config) { { include_frontmatter: false } }
      let(:test_case_name) { 'text_png_output' }

      it 'generates the correct transformed to source line map' do
        line_numbers = subject.blocks.map { |b| b[:source_line] }.join("\n")

        expect(line_numbers).to eq test_case[:expected_line_numbers]
      end
    end

    context 'when json is invalid' do
      let(:notebook) { 'a' }

      it 'raises error' do
        expect { subject }.to raise_error(IpynbDiff::InvalidNotebookError)
      end
    end

    context 'when it does not have the cell tag' do
      let(:notebook) { '{"metadata":[]}' }

      it 'raises error' do
        expect { subject }.to raise_error(IpynbDiff::InvalidNotebookError)
      end
    end

    context 'when notebook can not be parsed' do
      let(:notebook) { '{"cells":[]}' }

      before do
        # Force the JSON parser to return nil to simulate an unparsable notebook.
        allow(Oj::Parser.usual).to receive(:parse).and_return(nil)
      end

      it 'raises error' do
        expect { subject }.to raise_error(IpynbDiff::InvalidNotebookError)
      end
    end
  end
end

View File

@ -1,59 +1,55 @@
# frozen_string_literal: true
require 'ipynbdiff'
require 'rspec'
require 'rspec-parameterized'
BASE_PATH = File.join(File.expand_path(File.dirname(__FILE__)), 'testdata')
require_relative 'test_helper'
describe IpynbDiff do
def diff_signs(diff)
diff.to_s(:text).scan(/.*\n/).map { |l| l[0] }.join('')
diff.to_s(:text).scan(/.*\n/).map { |l| l[0] }.join('') # rubocop:disable Rails/Pluck
end
describe 'diff' do
let(:from_path) { File.join(BASE_PATH, 'from.ipynb') }
let(:to_path) { File.join(BASE_PATH,'to.ipynb') }
describe '.diff' do
let(:from_path) { FROM_PATH }
let(:to_path) { TO_PATH }
let(:from) { File.read(from_path) }
let(:to) { File.read(to_path) }
let(:include_frontmatter) { false }
let(:hide_images) { false }
subject { IpynbDiff.diff(from, to, include_frontmatter: include_frontmatter, hide_images: hide_images) }
subject { described_class.diff(from, to, include_frontmatter: include_frontmatter, hide_images: hide_images) }
context 'if preprocessing is active' do
it 'html tables are stripped' do
is_expected.to_not include('<td>')
is_expected.not_to include('<td>')
end
end
context 'when to is nil' do
let(:to) { nil }
let(:from_path) { File.join(BASE_PATH, 'only_md', 'input.ipynb') }
let(:from_path) { test_case_input_path('only_md') }
it 'all lines are removals' do
expect(diff_signs(subject)).to eq('-----')
end
end
context 'when to is nil' do
context 'when from is nil' do
let(:from) { nil }
let(:to_path) { File.join(BASE_PATH, 'only_md', 'input.ipynb') }
let(:to_path) { test_case_input_path('only_md') }
it 'all lines are additions' do
expect(diff_signs(subject)).to eq('+++++')
end
end
context 'When include_frontmatter is true' do
context 'when include_frontmatter is true' do
let(:include_frontmatter) { true }
it 'should show changes metadata in the metadata' do
it 'shows changes metadata in the metadata' do
expect(subject.to_s(:text)).to include('+ display_name: New Python 3 (ipykernel)')
end
end
context 'When hide_images is true' do
context 'when hide_images is true' do
let(:hide_images) { true }
it 'hides images' do
@ -61,9 +57,9 @@ describe IpynbDiff do
end
end
context 'When include_frontmatter is false' do
it 'should drop metadata from the diff' do
expect(subject.to_s(:text)).to_not include('+ display_name: New Python 3 (ipykernel)')
context 'when include_frontmatter is false' do
it 'drops metadata from the diff' do
expect(subject.to_s(:text)).not_to include('+ display_name: New Python 3 (ipykernel)')
end
end
@ -83,40 +79,47 @@ describe IpynbDiff do
end
end
describe 'transform' do
[nil, 'a', '{"metadata":[]}'].each do |invalid_nb|
context "when json is invalid (#{invalid_nb || 'nil'})" do
it 'is nil' do
expect(IpynbDiff.transform(invalid_nb)).to be_nil
end
describe '.transform' do
let(:notebook) { FROM_IPYNB }
let(:include_frontmatter) { false }
let(:hide_images) { false }
subject do
described_class.transform(notebook,
include_frontmatter: include_frontmatter,
hide_images: hide_images)
end
describe 'error cases' do
using RSpec::Parameterized::TableSyntax
where(:ctx, :notebook) do
'notebook is nil' | nil
'notebook is invalid' | 'a'
'notebook does not have cell' | '{"metadata":[]}'
end
with_them do
it { is_expected.to be_nil }
end
end
context 'options' do
let(:include_frontmatter) { false }
let(:hide_images) { false }
subject do
IpynbDiff.transform(File.read(File.join(BASE_PATH, 'from.ipynb')),
include_frontmatter: include_frontmatter,
hide_images: hide_images)
describe 'options' do
context 'when include_frontmatter is false' do
it { is_expected.not_to include('display_name: Python 3 (ipykernel)') }
end
context 'include_frontmatter is false' do
it { is_expected.to_not include('display_name: Python 3 (ipykernel)') }
end
context 'include_frontmatter is true' do
context 'when include_frontmatter is true' do
let(:include_frontmatter) { true }
it { is_expected.to include('display_name: Python 3 (ipykernel)') }
end
context 'hide_images is false' do
context 'when hide_images is false' do
it { is_expected.not_to include('[Hidden Image Output]') }
end
context 'hide_images is true' do
context 'when hide_images is true' do
let(:hide_images) { true }
it { is_expected.to include(' [Hidden Image Output]') }

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
require 'simplecov'
SimpleCov.start
require 'ipynb_diff'
require 'rspec'
require 'rspec-parameterized'
require 'json'
# Directory containing the notebook fixtures used by the specs.
BASE_PATH = File.join(__dir__ || '', 'testdata')

# Baseline "before" and "after" notebook fixtures for the diff specs.
FROM_PATH = File.join(BASE_PATH, 'from.ipynb')
TO_PATH = File.join(BASE_PATH, 'to.ipynb')

# Fixture contents are read once at load time and reused across examples.
FROM_IPYNB = File.read(FROM_PATH)
TO_IPYNB = File.read(TO_PATH)
# Absolute path of the input notebook fixture for the given test case.
def test_case_input_path(case_name)
  File.join(BASE_PATH, case_name, 'input.ipynb')
end
# Absolute path of the expected-symbols fixture for the given test case.
def test_case_symbols_path(case_name)
  File.join(BASE_PATH, case_name, 'expected_symbols.txt')
end
# Absolute path of the expected-markdown fixture for the given test case.
def test_case_md_path(case_name)
  File.join(BASE_PATH, case_name, 'expected.md')
end
# Absolute path of the expected-line-numbers fixture for the given test case.
def test_case_line_numbers_path(case_name)
  File.join(BASE_PATH, case_name, 'expected_line_numbers.txt')
end
# Returns the contents of the file at +file_path+, or nil when no regular
# file exists there (missing expectation fixtures are represented as nil).
def read_file_if_exists(file_path)
  return nil unless File.file?(file_path)

  File.read(file_path)
end
# Loads every fixture file belonging to a test case directory.
# Values are nil for fixture files the case does not provide.
def read_test_case(test_case_name)
  fixture_paths = {
    input: test_case_input_path(test_case_name),
    expected_markdown: test_case_md_path(test_case_name),
    expected_symbols: test_case_symbols_path(test_case_name),
    expected_line_numbers: test_case_line_numbers_path(test_case_name)
  }

  fixture_paths.transform_values { |path| read_file_if_exists(path) }
end

Some files were not shown because too many files have changed in this diff Show More