Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-01-17 15:41:18 +00:00
parent ecb6bda4d4
commit a1f2c38e4b
84 changed files with 1647 additions and 256 deletions

View File

@ -1,15 +1,52 @@
<!-- This template is used for proposing changes to the left sidebar contextual navigation. This could include additions, removals, or general changes to overall hierarchy.-->
### Proposal
# Summary
_Please summarize the navigation changes you are hoping to make._
# Background
<!-- Use this section to explain the proposed changes, including details around usage and business drivers. -->
Link to epic / issue with overall feature proposal: _issue link_
#### Other locations that were considered
Does this navigation proposal facilitate one of our primary JTBDs? Which job?
<!-- Include other design patterns or places you considered for this feature besides navigation. -->
### Checklist
How does this change improve the workflow for users attempting to complete that job?
How many users will be impacted by this proposed change?
- [ ] Limited
- [ ] Moderate
- [ ] All users
What is the product maturity stage of the associated feature?
- [ ] Experimental
- [ ] Beta
- [ ] General availability
How often do you expect an average GitLab user (_not_ just your target persona) to reach for this functionality?
- [ ] Several times a day
- [ ] Once a day
- [ ] A few times a week
- [ ] Once a week
- [ ] Less than once a week
# Approaches considered
_Please describe the alternative designs you considered._
# Justification
_Provide your justification for this change. See https://handbook.gitlab.com/handbook/product/ux/navigation/ for the minimum validation we request based on the scope of access of your proposed change._
# Review checklist
#### Requester
- [ ] Review the [handbook page for navigation](https://handbook.gitlab.com/handbook/product/ux/navigation/).
- [ ] Add relevant information to the issue description detailing your proposal, including usage and business drivers.
@ -22,5 +59,16 @@
- [ ] Consider whether you need to [communicate the change somehow](https://design.gitlab.com/patterns/navigation#messaging-changes-to-users), or if you will have an interim period in the UI where your item will live in more than one place.
- [ ] Ensure engineers are familiar with the [implementation steps for navigation](https://docs.gitlab.com/ee/development/navigation_sidebar.html#navigation-sidebar).
#### Foundations Product Manager
- [ ] Confirm proposal has necessary information
- [ ] Schedule design review for next milestone
#### Foundations Product Designer
- [ ] Confirm Pajamas guidelines are followed
- [ ] Confirm a11y needs are addressed
- [ ] Confirm burden of proof supplied for stated scope of access
/label ~UX ~"UI text" ~"documentation" ~"Category:Navigation & Settings" ~navigation ~type::ignore
/label ~"Nav request::Start"
/label ~"Nav request::Start"

View File

@ -570,7 +570,7 @@
{"name":"rbs","version":"3.6.1","platform":"ruby","checksum":"ed7273d018556844583d1785ac54194e67eec594d68e317d57fa90ad035532c0"},
{"name":"rbtrace","version":"0.5.1","platform":"ruby","checksum":"e8cba64d462bfb8ba102d7be2ecaacc789247d52ac587d8003549d909cb9c5dc"},
{"name":"rchardet","version":"1.8.0","platform":"ruby","checksum":"693acd5253d5ade81a51940697955f6dd4bb2f0d245bda76a8e23deec70a52c7"},
{"name":"rdoc","version":"6.10.0","platform":"ruby","checksum":"db665021883ac9df3ba29cdf71aece960749888db1bf9615b4a584cfa3fa3eda"},
{"name":"rdoc","version":"6.11.0","platform":"ruby","checksum":"bec66fb9b019be64f7ba7d2cd2aecb283a3a01fef23a95b33e2349c6d1aa0040"},
{"name":"re2","version":"2.7.0","platform":"aarch64-linux","checksum":"778921298b6e8aba26a6230dd298c9b361b92e45024f81fa6aee788060fa307c"},
{"name":"re2","version":"2.7.0","platform":"arm-linux","checksum":"d328b5286d83ae265e13b855da8e348a976f80f91b748045b52073a570577954"},
{"name":"re2","version":"2.7.0","platform":"arm64-darwin","checksum":"7d993f27a1afac4001c539a829e2af211ced62604930c90df32a307cf74cb4a4"},

View File

@ -1568,7 +1568,7 @@ GEM
msgpack (>= 0.4.3)
optimist (>= 3.0.0)
rchardet (1.8.0)
rdoc (6.10.0)
rdoc (6.11.0)
psych (>= 4.0.0)
re2 (2.7.0)
mini_portile2 (~> 2.8.5)

View File

@ -5,6 +5,7 @@ import { mapActions, mapGetters } from 'vuex';
import { parseBoolean } from '~/lib/utils/common_utils';
import { apolloProvider } from '~/graphql_shared/issuable_client';
import store from '~/mr_notes/stores';
import { pinia } from '~/pinia/instance';
export const initReviewBar = () => {
const el = document.getElementById('js-review-bar');
@ -17,6 +18,7 @@ export const initReviewBar = () => {
new Vue({
el,
store,
pinia,
apolloProvider,
components: {
ReviewBar: () => import('./components/review_bar.vue'),

View File

@ -38,7 +38,7 @@ export default {
type: Object,
required: true,
},
hideDefaultActions: {
isBlobPage: {
type: Boolean,
required: false,
default: false,
@ -121,9 +121,6 @@ export default {
};
},
computed: {
showDefaultActions() {
return !this.hideDefaultActions;
},
showWebIdeLink() {
return !this.blob.archived && this.blob.editBlobPath;
},
@ -206,7 +203,7 @@ export default {
<slot name="actions"></slot>
<default-actions
v-if="showDefaultActions"
v-if="!glFeatures.blobOverflowMenu || (glFeatures.blobOverflowMenu && !isBlobPage)"
:raw-path="blob.externalStorageUrl || blob.rawPath"
:active-viewer="viewer"
:has-render-error="hasRenderError"

View File

@ -91,6 +91,7 @@ export default {
gauss: () => import(/* webpackChunkName: 'hl-gauss' */ 'highlight.js/lib/languages/gauss'),
gcode: () => import(/* webpackChunkName: 'hl-gcode' */ 'highlight.js/lib/languages/gcode'),
gherkin: () => import(/* webpackChunkName: 'hl-gherkin' */ 'highlight.js/lib/languages/gherkin'),
gleam: () => import(/* webpackChunkName: 'hl-gleam' */ '@gleam-lang/highlight.js-gleam'),
glsl: () => import(/* webpackChunkName: 'hl-glsl' */ 'highlight.js/lib/languages/glsl'),
gml: () => import(/* webpackChunkName: 'hl-gml' */ 'highlight.js/lib/languages/gml'),
go: () => import(/* webpackChunkName: 'hl-go' */ 'highlight.js/lib/languages/go'),

View File

@ -6,13 +6,14 @@ import { cleanLeadingSeparator } from '~/lib/utils/url_utility';
import { apolloProvider } from '~/graphql_shared/issuable_client';
import { getCookie, parseBoolean, removeCookie } from '~/lib/utils/common_utils';
import notesStore from '~/mr_notes/stores';
import { pinia as piniaStore } from '~/pinia/instance';
import eventHub from '../notes/event_hub';
import DiffsApp from './components/app.vue';
import { TREE_LIST_STORAGE_KEY, DIFF_WHITESPACE_COOKIE_NAME } from './constants';
export default function initDiffsApp(store = notesStore) {
export default function initDiffsApp(store = notesStore, pinia = piniaStore) {
const el = document.getElementById('js-diffs-app');
const { dataset } = el;
@ -25,6 +26,7 @@ export default function initDiffsApp(store = notesStore) {
components: {
DiffsApp,
},
pinia,
store,
apolloProvider,
provide: {

View File

@ -1,6 +1,7 @@
import Vue from 'vue';
import DiscussionCounter from '~/notes/components/discussion_counter.vue';
import store from '~/mr_notes/stores';
import { pinia } from '~/pinia/instance';
export function initDiscussionCounter() {
const el = document.getElementById('js-vue-discussion-counter');
@ -15,6 +16,7 @@ export function initDiscussionCounter() {
components: {
DiscussionCounter,
},
pinia,
store,
render(createElement) {
return createElement('discussion-counter', {

View File

@ -44,7 +44,7 @@ function setupMrNotesState(store, notesDataset, diffsDataset) {
});
}
export function initMrStateLazyLoad(store = mrNotes, { reviewBarParams } = {}) {
export function initMrStateLazyLoad(store = mrNotes) {
store.dispatch('setActiveTab', window.mrTabs.getCurrentAction());
window.mrTabs.eventHub.$on('MergeRequestTabChange', (value) =>
store.dispatch('setActiveTab', value),
@ -65,7 +65,7 @@ export function initMrStateLazyLoad(store = mrNotes, { reviewBarParams } = {}) {
eventHub.$once('fetchNotesData', () => store.dispatch('fetchNotes'));
requestIdleCallback(() => {
initReviewBar(reviewBarParams);
initReviewBar();
initOverviewTabCounter();
initDiscussionCounter();
});

View File

@ -4,7 +4,7 @@ import { initMrStateLazyLoad } from '~/mr_notes/init';
import MergeRequest from '../merge_request';
import { resetServiceWorkersPublicPath } from '../lib/utils/webpack';
export default function initMrNotes(lazyLoadParams) {
export default function initMrNotes() {
resetServiceWorkersPublicPath();
const mrShowNode = document.querySelector('.merge-request');
@ -13,7 +13,7 @@ export default function initMrNotes(lazyLoadParams) {
action: mrShowNode.dataset.mrAction,
});
initMrStateLazyLoad(undefined, lazyLoadParams);
initMrStateLazyLoad();
document.addEventListener('merged:UpdateActions', () => {
initRevertCommitModal('i_code_review_post_merge_submit_revert_modal');

View File

@ -7,6 +7,7 @@ import { apolloProvider } from '~/graphql_shared/issuable_client';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import { parseBoolean } from '~/lib/utils/common_utils';
import store from '~/mr_notes/stores';
import { pinia } from '~/pinia/instance';
import notesEventHub from '~/notes/event_hub';
import discussionNavigator from '../notes/components/discussion_navigator.vue';
import NotesApp from '../notes/components/notes_app.vue';
@ -38,6 +39,7 @@ export default () => {
components: {
NotesApp,
},
pinia,
store,
apolloProvider,
provide: {

View File

@ -16,7 +16,7 @@ import toast from '~/vue_shared/plugins/global_toast';
import getStateQuery from './queries/get_state.query.graphql';
import initCheckoutModal from './init_checkout_modal';
export default function initMergeRequestShow(store) {
export default function initMergeRequestShow(store, pinia) {
new ZenMode(); // eslint-disable-line no-new
initPipelineCountListener(document.querySelector('#commit-pipeline-table-view'));
addShortcutsExtension(ShortcutsIssuable);
@ -33,6 +33,7 @@ export default function initMergeRequestShow(store) {
new Vue({
el,
name: 'MergeRequestHeaderRoot',
pinia,
store,
apolloProvider: new VueApollo({
defaultClient: createDefaultClient(),

View File

@ -10,6 +10,7 @@ import initSidebarBundle from '~/sidebar/sidebar_bundle';
import { apolloProvider } from '~/graphql_shared/issuable_client';
import { parseBoolean } from '~/lib/utils/common_utils';
import { initMrMoreDropdown } from '~/mr_more_dropdown';
import { pinia } from '~/pinia/instance';
import initShow from './init_merge_request_show';
import getStateQuery from './queries/get_state.query.graphql';
@ -21,7 +22,7 @@ const tabData = Vue.observable({
export function initMrPage() {
initMrNotes();
initShow(store);
initShow(store, pinia);
initMrMoreDropdown();
startCodeReviewMessaging({ signalBus: diffsEventHub });
@ -44,7 +45,7 @@ export function initMrPage() {
}
requestIdleCallback(() => {
initSidebarBundle(store);
initSidebarBundle(store, pinia);
const el = document.getElementById('js-merge-sticky-header');
@ -78,6 +79,7 @@ requestIdleCallback(() => {
el,
name: 'MergeRequestStickyHeaderRoot',
store,
pinia,
apolloProvider,
provide: {
query: getStateQuery,

View File

@ -0,0 +1,6 @@
import Vue from 'vue';
import { createPinia, PiniaVuePlugin } from 'pinia';
Vue.use(PiniaVuePlugin);
export const pinia = createPinia();

View File

@ -335,6 +335,7 @@ export default {
<gl-loading-icon v-if="isLoading" size="sm" />
<div v-if="blobInfo && !isLoading" id="fileHolder" class="file-holder">
<blob-header
is-blob-page
:blob="blobInfo"
:hide-viewer-switcher="isBinaryFileType || isUsingLfs"
:is-binary="isBinaryFileType"

View File

@ -78,6 +78,7 @@ export default {
'kerberosUrl',
'downloadLinks',
'downloadArtifacts',
'isBinary',
],
props: {
projectPath: {
@ -296,7 +297,7 @@ export default {
</div>
<!-- Blob controls -->
<blob-controls :project-path="projectPath" :ref-type="getRefType" />
<blob-controls :project-path="projectPath" :ref-type="getRefType" :is-binary="isBinary" />
</div>
</section>
</template>

View File

@ -3,6 +3,7 @@ import { GlButton, GlTooltipDirective } from '@gitlab/ui';
import { __ } from '~/locale';
import { createAlert } from '~/alert';
import getRefMixin from '~/repository/mixins/get_ref';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import initSourcegraph from '~/sourcegraph';
import Shortcuts from '~/behaviors/shortcuts/shortcuts';
import { addShortcutsExtension } from '~/behaviors/shortcuts';
@ -21,6 +22,8 @@ import { FIND_FILE_BUTTON_CLICK } from '~/tracking/constants';
import { updateElementsVisibility } from '~/repository/utils/dom';
import blobControlsQuery from '~/repository/queries/blob_controls.query.graphql';
import { getRefType } from '~/repository/utils/ref_type';
import { TEXT_FILE_TYPE } from '../../constants';
import OverflowMenu from './blob_overflow_menu.vue';
export default {
i18n: {
@ -33,11 +36,12 @@ export default {
buttonClassList: 'sm:gl-w-auto gl-w-full sm:gl-mt-0 gl-mt-3',
components: {
GlButton,
OverflowMenu,
},
directives: {
GlTooltip: GlTooltipDirective,
},
mixins: [getRefMixin],
mixins: [getRefMixin, glFeatureFlagMixin()],
apollo: {
project: {
query: blobControlsQuery,
@ -67,6 +71,11 @@ export default {
required: false,
default: null,
},
isBinary: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {
@ -74,6 +83,9 @@ export default {
};
},
computed: {
isLoadingRepositoryBlob() {
return this.$apollo.queries.project.loading;
},
filePath() {
return this.$route.params.path;
},
@ -86,6 +98,12 @@ export default {
showBlameButton() {
return !this.blobInfo.storedExternally && this.blobInfo.externalStorage !== 'lfs';
},
isBinaryFileType() {
return this.isBinary || this.blobInfo.simpleViewer?.fileType !== TEXT_FILE_TYPE;
},
rawPath() {
return this.blobInfo.externalStorageUrl || this.blobInfo.rawPath;
},
findFileShortcutKey() {
return keysFor(START_SEARCH_PROJECT_FILE)[0];
},
@ -106,6 +124,9 @@ export default {
? null
: sanitize(`${description} <kbd class="flat gl-ml-1" aria-hidden=true>${key}</kbd>`);
},
isEmpty() {
return this.blobInfo.rawSize === '0';
},
},
watch: {
showBlobControls(shouldShow) {
@ -136,11 +157,14 @@ export default {
InternalEvents.trackEvent(FIND_FILE_BUTTON_CLICK);
Shortcuts.focusSearchFile();
},
onCopy() {
navigator.clipboard.writeText(this.blobInfo.rawTextBlob);
},
},
};
</script>
<template>
<div v-if="showBlobControls" class="gl-flex gl-flex-wrap gl-items-baseline gl-gap-3">
<div v-if="showBlobControls" class="gl-flex gl-flex-wrap gl-items-center gl-gap-3">
<gl-button
v-gl-tooltip.html="findFileTooltip"
:aria-keyshortcuts="findFileShortcutKey"
@ -170,5 +194,18 @@ export default {
>
{{ $options.i18n.permalink }}
</gl-button>
<overflow-menu
v-if="!isLoadingRepositoryBlob && glFeatures.blobOverflowMenu"
:raw-path="rawPath"
:rich-viewer="blobInfo.richViewer"
:simple-viewer="blobInfo.simpleViewer"
:is-binary="isBinaryFileType"
:environment-name="blobInfo.environmentFormattedExternalUrl"
:environment-path="blobInfo.environmentExternalUrlForRouteMap"
:is-empty="isEmpty"
:override-copy="true"
@copy="onCopy"
/>
</div>
</template>

View File

@ -2,6 +2,7 @@
import { GlDisclosureDropdown, GlDisclosureDropdownItem, GlTooltipDirective } from '@gitlab/ui';
import { sprintf, s__, __ } from '~/locale';
import { setUrlParams, relativePathToAbsolute, getBaseURL } from '~/lib/utils/url_utility';
import { SIMPLE_BLOB_VIEWER, RICH_BLOB_VIEWER } from '~/blob/components/constants';
export const i18n = {
dropdownLabel: __('Actions'),
@ -10,12 +11,8 @@ export const i18n = {
btnRawTitle: s__('BlobViewer|Open raw'),
};
const RICH_BLOB_VIEWER = 'rich';
const SIMPLE_BLOB_VIEWER = 'simple';
export default {
i18n,
RICH_BLOB_VIEWER,
components: {
GlDisclosureDropdown,
GlDisclosureDropdownItem,
@ -36,15 +33,15 @@ export default {
type: String,
required: true,
},
activeViewer: {
type: String,
default: SIMPLE_BLOB_VIEWER,
richViewer: {
type: Object,
required: false,
default: () => {},
},
hasRenderError: {
type: Boolean,
simpleViewer: {
type: Object,
required: false,
default: false,
default: () => {},
},
isBinary: {
type: Boolean,
@ -73,11 +70,26 @@ export default {
},
},
computed: {
activeViewerType() {
if (this.$route?.query?.plain !== '1') {
const richViewer = document.querySelector('.blob-viewer[data-type="rich"]');
if (richViewer) {
return RICH_BLOB_VIEWER;
}
}
return SIMPLE_BLOB_VIEWER;
},
viewer() {
return this.activeViewerType === RICH_BLOB_VIEWER ? this.richViewer : this.simpleViewer;
},
hasRenderError() {
return Boolean(this.viewer.renderError);
},
downloadUrl() {
return setUrlParams({ inline: false }, relativePathToAbsolute(this.rawPath, getBaseURL()));
},
copyDisabled() {
return this.activeViewer === this.$options.RICH_BLOB_VIEWER;
return this.activeViewerType === RICH_BLOB_VIEWER;
},
getBlobHashTarget() {
if (this.overrideCopy) {

View File

@ -61,6 +61,7 @@ export default function initHeaderApp({ router, isReadmeView = false, isBlobView
downloadLinks,
downloadArtifacts,
projectShortPath,
isBinary,
} = headerEl.dataset;
const {
@ -125,6 +126,7 @@ export default function initHeaderApp({ router, isReadmeView = false, isBlobView
downloadLinks: downloadLinks ? JSON.parse(downloadLinks) : null,
downloadArtifacts: downloadArtifacts ? JSON.parse(downloadArtifacts) : [],
isBlobView,
isBinary: parseBoolean(isBinary),
},
apolloProvider,
router: router || createRouter(projectPath, escapedRef),

View File

@ -46,7 +46,12 @@ export default {
.catch(() => this.$emit('error'));
},
initHighlightWorker(blob, isUsingLfs) {
const { rawTextBlob, language, fileType, externalStorageUrl, rawPath, simpleViewer } = blob;
const { rawTextBlob, name, fileType, externalStorageUrl, rawPath, simpleViewer } = blob;
let { language } = blob;
if (name.endsWith('.gleam')) {
language = 'gleam';
}
if (simpleViewer?.fileType !== TEXT_FILE_TYPE) return;

View File

@ -9,6 +9,22 @@ query getBlobControls($projectPath: ID!, $filePath: String!, $ref: String!, $ref
permalinkPath
storedExternally
externalStorage
environmentFormattedExternalUrl
environmentExternalUrlForRouteMap
rawPath
rawTextBlob
simpleViewer {
fileType
tooLarge
type
renderError
}
richViewer {
fileType
tooLarge
type
renderError
}
}
}
}

View File

@ -382,7 +382,7 @@ function mountSidebarReferenceWidget() {
});
}
function mountIssuableLockForm(store) {
function mountIssuableLockForm(store, pinia) {
const el = document.querySelector('.js-sidebar-lock-root');
if (!el || !store) {
@ -394,6 +394,7 @@ function mountIssuableLockForm(store) {
return new Vue({
el,
name: 'SidebarLockRoot',
pinia,
store,
provide: {
fullPath,
@ -721,7 +722,7 @@ export function mountAssigneesDropdown() {
});
}
export function mountSidebar(mediator, store) {
export function mountSidebar(mediator, store, pinia) {
mountSidebarTodoWidget();
mountSidebarAssigneesWidget();
mountSidebarReviewers(mediator);
@ -730,7 +731,7 @@ export function mountSidebar(mediator, store) {
mountSidebarMilestoneWidget();
mountSidebarDueDateWidget();
mountSidebarReferenceWidget();
mountIssuableLockForm(store);
mountIssuableLockForm(store, pinia);
mountSidebarParticipantsWidget();
mountSidebarSubscriptionsWidget();
mountCopyEmailToClipboard();

View File

@ -1,9 +1,9 @@
import { mountSidebar, getSidebarOptions } from 'ee_else_ce/sidebar/mount_sidebar';
import Mediator from './sidebar_mediator';
export default (store) => {
export default (store, pinia) => {
const mediator = new Mediator(getSidebarOptions());
mediator.fetch();
mountSidebar(mediator, store);
mountSidebar(mediator, store, pinia);
};

View File

@ -22,6 +22,7 @@ export const DEFAULT_SNOWPLOW_OPTIONS = {
export const ACTION_ATTR_SELECTOR = '[data-track-action]';
export const LOAD_ACTION_ATTR_SELECTOR = '[data-track-action="render"]';
// Keep these in sync with the strings used in spec/support/matchers/internal_events_matchers.rb
export const INTERNAL_EVENTS_SELECTOR = '[data-event-tracking]';
export const LOAD_INTERNAL_EVENTS_SELECTOR = '[data-event-tracking-load="true"]';

View File

@ -10,6 +10,7 @@ export const ROUGE_TO_HLJS_LANGUAGE_MAP = {
jinja: 'django',
docker: 'dockerfile',
batchfile: 'dos',
elixir: 'elixir',
html: 'xml',
hylang: 'hy',
tex: 'latex',

View File

@ -19,6 +19,7 @@ class Projects::TreeController < Projects::ApplicationController
before_action do
push_frontend_feature_flag(:inline_blame, @project)
push_frontend_feature_flag(:blob_overflow_menu, current_user)
push_licensed_feature(:file_locks) if @project.licensed_feature_available?(:file_locks)
end

View File

@ -306,6 +306,7 @@ module BlobHelper
def vue_blob_header_app_data(project, blob, ref)
{
blob_path: blob.path,
is_binary: blob.binary?,
breadcrumbs: breadcrumb_data_attributes,
escaped_ref: ActionDispatch::Journey::Router::Utils.escape_path(ref),
history_link: project_commits_path(project, ref),

View File

@ -16,6 +16,7 @@ module DesignManagement
include Participable
include CacheMarkdownField
include Subscribable
include EachBatch
cache_markdown_field :description

View File

@ -6,6 +6,7 @@ module DesignManagement
include ShaAttribute
include AfterCommitQueue
include Gitlab::Utils::StrongMemoize
include EachBatch
extend Gitlab::ExclusiveLeaseHelpers
NotSameIssue = Class.new(StandardError)

View File

@ -8,6 +8,10 @@ module WorkItems
def raise_error(message)
raise Error, message
end
def log_error(message)
::Gitlab::AppLogger.error message
end
end
end
end

View File

@ -5,11 +5,52 @@ module WorkItems
module Widgets
class Designs < Base
def after_save_commit
# copy designs
return unless target_work_item.get_widget(:designs)
return unless work_item.designs.exists?
unless user_can_copy?
log_error("User cannot copy designs to work item")
return
end
target_design_collection = target_work_item.design_collection
unless target_design_collection.can_start_copy?
log_error("Target design collection copy state must be `ready`")
return
end
target_design_collection.start_copy!
DesignManagement::CopyDesignCollectionWorker.perform_async(current_user.id, work_item.id, target_work_item.id)
end
def post_move_cleanup
# do it
cleanup_designs
cleanup_design_versions
end
private
def user_can_copy?
current_user.can?(:read_design, work_item) && current_user.can?(:admin_issue, target_work_item)
end
# cleanup all designs for the work item, we use destroy as there are the notes, user_mentions and events
# associations that have `dependent: delete_all` and they need to be deleted too, after they are being copied
# to the target work item
def cleanup_designs
work_item.designs.each_batch(of: BATCH_SIZE) do |designs|
designs.destroy_all # rubocop:disable Cop/DestroyAll -- need to destroy all designs with associated records
end
end
# cleanup all design versions for the work item, we can safely use delete_all as there are no associated
# records or callbacks
def cleanup_design_versions
work_item.design_versions.each_batch(of: BATCH_SIZE) do |design_versions|
design_versions.delete_all
end
end
end
end

View File

@ -0,0 +1,71 @@
# frozen_string_literal: true
class CreateSiphonIssues < ClickHouse::Migration
def up
execute <<-SQL
CREATE TABLE IF NOT EXISTS siphon_issues
(
id Int64,
title Nullable(String),
author_id Nullable(Int64),
project_id Nullable(Int64),
created_at Nullable(DateTime64(6, 'UTC')),
updated_at Nullable(DateTime64(6, 'UTC')),
description Nullable(String),
milestone_id Nullable(Int64),
iid Nullable(Int64),
updated_by_id Nullable(Int64),
weight Nullable(Int64),
confidential Bool DEFAULT false,
due_date Nullable(Date32),
moved_to_id Nullable(Int64),
lock_version Nullable(Int64) DEFAULT 0,
title_html Nullable(String),
description_html Nullable(String),
time_estimate Nullable(Int64) DEFAULT 0,
relative_position Nullable(Int64),
service_desk_reply_to Nullable(String),
cached_markdown_version Nullable(Int64),
last_edited_at Nullable(DateTime64(6, 'UTC')),
last_edited_by_id Nullable(Int64),
discussion_locked Nullable(Bool),
closed_at Nullable(DateTime64(6, 'UTC')),
closed_by_id Nullable(Int64),
state_id Int8 DEFAULT 1,
duplicated_to_id Nullable(Int64),
promoted_to_epic_id Nullable(Int64),
health_status Nullable(Int8),
external_key Nullable(String),
sprint_id Nullable(Int64),
blocking_issues_count Int64 DEFAULT 0,
upvotes_count Int64 DEFAULT 0,
work_item_type_id Nullable(Int64),
namespace_id Nullable(Int64),
start_date Nullable(Date32),
tmp_epic_id Nullable(Int64),
imported_from Int8 DEFAULT 0,
correct_work_item_type_id Int64,
author_id_convert_to_bigint Nullable(Int64),
closed_by_id_convert_to_bigint Nullable(Int64),
duplicated_to_id_convert_to_bigint Nullable(Int64),
id_convert_to_bigint Int64 DEFAULT 0,
last_edited_by_id_convert_to_bigint Nullable(Int64),
milestone_id_convert_to_bigint Nullable(Int64),
moved_to_id_convert_to_bigint Nullable(Int64),
project_id_convert_to_bigint Nullable(Int64),
promoted_to_epic_id_convert_to_bigint Nullable(Int64),
updated_by_id_convert_to_bigint Nullable(Int64),
_siphon_replicated_at DateTime64(6, 'UTC') DEFAULT now(),
_siphon_deleted Bool DEFAULT FALSE
)
ENGINE = ReplacingMergeTree(_siphon_replicated_at, _siphon_deleted)
PRIMARY KEY id
SQL
end
def down
execute <<-SQL
DROP TABLE IF EXISTS siphon_issues
SQL
end
end

View File

@ -5,4 +5,4 @@ feature_category: compliance_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/172754
milestone: '17.7'
queued_migration_version: 20241114202257
finalized_by: # version of the migration that finalized this BBM
finalized_by: 20250106194913

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddIndexToDesignManagementDesingsOnIssueIdAndId < Gitlab::Database::Migration[2.2]
INDEX_NAME = "index_on_design_management_designs_issue_id_and_id"
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_index :design_management_designs, [:issue_id, :id], name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :design_management_designs, INDEX_NAME
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddIndexToDesignManagementVersionsOnIssueIdAndId < Gitlab::Database::Migration[2.2]
INDEX_NAME = "index_design_management_versions_on_issue_id_and_id"
EXISTING_INDEX_NAME = 'index_design_management_versions_on_issue_id'
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_index :design_management_versions, [:issue_id, :id], name: INDEX_NAME
remove_concurrent_index_by_name(:design_management_versions, EXISTING_INDEX_NAME)
end
def down
remove_concurrent_index_by_name :design_management_versions, INDEX_NAME
add_concurrent_index :design_management_versions, :issue_id, name: EXISTING_INDEX_NAME
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
class FinalizeBackfillComplianceViolationNullTargetProjectIdsMigration < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.9'
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
ensure_batched_background_migration_is_finished(
job_class_name: 'BackfillComplianceViolationNullTargetProjectIds',
table_name: :merge_requests_compliance_violations,
column_name: :id,
job_arguments: [],
finalize: true
)
end
def down; end
end

View File

@ -0,0 +1 @@
0c5ebc023d7e5dbe41c4a1a986768c6cc3427d8a1010cbf11fcf40b83dcd6e96

View File

@ -0,0 +1 @@
544d80aefe22b288f4f31413d34e7fdd25aff45276488f94cf64648dbbeb69d0

View File

@ -0,0 +1 @@
55951764e2acbed00867eb5758a1cf77717163b83aef31d3d51a48bd52b97316

View File

@ -31611,7 +31611,7 @@ CREATE INDEX index_design_management_repository_states_pending_verification ON d
CREATE INDEX index_design_management_versions_on_author_id ON design_management_versions USING btree (author_id) WHERE (author_id IS NOT NULL);
CREATE INDEX index_design_management_versions_on_issue_id ON design_management_versions USING btree (issue_id);
CREATE INDEX index_design_management_versions_on_issue_id_and_id ON design_management_versions USING btree (issue_id, id);
CREATE INDEX index_design_management_versions_on_namespace_id ON design_management_versions USING btree (namespace_id);
@ -32825,6 +32825,8 @@ CREATE INDEX index_observability_traces_issues_connections_on_project_id ON obse
CREATE UNIQUE INDEX index_on_deploy_keys_id_and_type_and_public ON keys USING btree (id, type) WHERE (public = true);
CREATE INDEX index_on_design_management_designs_issue_id_and_id ON design_management_designs USING btree (issue_id, id);
CREATE INDEX index_on_dingtalk_tracker_data_corpid ON dingtalk_tracker_data USING btree (corpid) WHERE (corpid IS NOT NULL);
COMMENT ON INDEX index_on_dingtalk_tracker_data_corpid IS 'JiHu-specific index';

View File

@ -108,7 +108,7 @@ To configure a self-hosted model:
|-------------|---------|---------|
| vLLM | `custom_openai/<name of the model served through vLLM>` | `custom_openai/Mixtral-8x7B-Instruct-v0.1` |
| Bedrock | `bedrock/<model ID of the model>` | `bedrock/mistral.mixtral-8x7b-instruct-v0:1` |
| Azure | `azure/<model ID of the model>` | `azure/gpt-35-turbo` |
| Azure OpenAI | `azure/<model ID of the model>` | `azure/gpt-35-turbo` |
| Others | The field is optional | |
1. Select **Create self-hosted model**.

View File

@ -239,7 +239,7 @@ Prefer using `additional_properties` instead.
#### Composable matchers
When a singe action triggers an event multiple times, triggers multiple different events, or increments some metrics but not others for the event,
you can use the `trigger_internal_events` and `increment_usage_metrics` matchers.
you can use the `trigger_internal_events` and `increment_usage_metrics` matchers on a block argument.
```ruby
expect { subject }
@ -293,6 +293,8 @@ Or you can use the `not_to` syntax:
expect { subject }.not_to trigger_internal_events('mr_created', 'member_role_created')
```
The `trigger_internal_events` matcher can also be used for testing [Haml with data attributes](#haml-with-data-attributes).
### Frontend tracking
Any frontend tracking call automatically passes the values `user.id`, `namespace.id`, and `project.id` from the current context of the page.
@ -579,40 +581,65 @@ describe('DeleteApplication', () => {
#### Haml with data attributes
If you are using the data attributes to register tracking at the Haml layer,
you can use the `have_internal_tracking` matcher method to assert if expected data attributes are assigned.
If you are using [data attributes](#data-event-attribute) to track internal events at the Haml layer,
you can use the [`trigger_internal_events` matcher](#composable-matchers) to assert that the expected properties are present.
For example, if we need to test the below Haml,
For example, if you need to test the below Haml,
```haml
%div{ data: { testid: '_testid_', event_tracking: 'render', event_label: '_tracking_label_' } }
%div{ data: { testid: '_testid_', event_tracking: 'some_event', event_label: 'some_label' } }
```
You can call assertions on any rendered HTML compatible with the `have_css` matcher.
Use the `:on_click` and `:on_load` chain methods to indicate when you expect the event to trigger.
Below would be the test case for above haml.
- [RSpec view specs](https://rspec.info/features/6-0/rspec-rails/view-specs/view-spec/)
- rendered HTML is a `String` ([RSpec views](https://rspec.info/features/6-0/rspec-rails/view-specs/view-spec/))
```ruby
it 'assigns the tracking items' do
render
expect(rendered).to have_internal_tracking(event: 'render', label: '_tracking_label_', testid: '_testid_')
expect(rendered).to trigger_internal_events('some_event').on_click
.with(additional_properties: { label: 'some_label' })
end
```
- [ViewComponent](https://viewcomponent.org/) specs
- rendered HTML is a `Capybara::Node::Simple` ([ViewComponent](https://viewcomponent.org/))
```ruby
it 'assigns the tracking items' do
render_inline(component)
expect(page).to have_internal_tracking(event: 'render', label: '_tracking_label_', testid: '_testid_')
expect(page.find_by_testid('_testid_'))
.to trigger_internal_events('some_event').on_click
.with(additional_properties: { label: 'some_label' })
end
```
`event` is required for the matcher and `label`/`testid` are optional.
It is recommended to use `testid` when possible for exactness.
When you want to ensure that tracking isn't assigned, you can use `not_to` with the above matchers.
- rendered HTML is a `Nokogiri::HTML4::DocumentFragment` ([ViewComponent](https://viewcomponent.org/))
```ruby
it 'assigns the tracking items' do
expect(render_inline(component))
.to trigger_internal_events('some_event').on_click
.with(additional_properties: { label: 'some_label' })
end
```
Or you can use the `not_to` syntax:
```ruby
it 'assigns the tracking items' do
render_inline(component)
expect(page).not_to trigger_internal_events
end
```
When negated, the matcher accepts no additional chain methods or arguments.
This asserts that no tracking attributes are in use.
### Using Internal Events API

View File

@ -50,7 +50,7 @@ For example, if you use
you can identify a user by email:
```python
Sentry.setUser({ email: "john.doe@example.com" });
sentry_sdk.set_user({ email: "john.doe@example.com" });
```
For more information about user identification, see the [Sentry documentation](https://docs.sentry.io/).

View File

@ -158,6 +158,10 @@ GEM
rake (13.2.1)
rdoc (6.8.1)
psych (>= 4.0.0)
redis (5.3.0)
redis-client (>= 0.22.0)
redis-client (0.23.1)
connection_pool
regexp_parser (2.9.3)
reline (0.5.12)
io-console (~> 0.5)
@ -241,6 +245,7 @@ DEPENDENCIES
gitlab-active-context!
gitlab-styles
rake (~> 13.0)
redis
rspec (~> 3.0)
rspec-rails
rubocop

View File

@ -87,6 +87,63 @@ ActiveContext.raw_queues
=> ["ai_context_queues:{merge_request}:0", "ai_context_queues:{merge_request}:1"]
```
### Adding a new collection
A collection maps data to references and specifies a queue to track its references.
To add a new collection:
1. Create a new file in the appropriate directory
1. Define a class that `includes ActiveContext::Concerns::Collection`
1. Implement the `self.queue` class method to return the associated queue
1. Implement the `references` instance method to return the references for an object
Example:
```ruby
module Ai
module Context
module Collections
class MergeRequest
include ActiveContext::Concerns::Collection
def self.queue
Queues::MergeRequest
end
def references
[Search::Elastic::References::Embedding.serialize(object)]
end
end
end
end
end
```
References can be added to the queue in any of the following ways:
```ruby
Ai::Context::Collections::MergeRequest.track!(MergeRequest.first)
```
```ruby
Ai::Context::Collections::MergeRequest.track!(MergeRequest.take(10))
```
```ruby
ActiveContext.track!(MergeRequest.first, collection: Ai::Context::Collections::MergeRequest)
```
```ruby
ActiveContext.track!(MergeRequest.first, collection: Ai::Context::Collections::MergeRequest, queue: Ai::Context::Queues::Default)
```
To view all tracked references:
```ruby
ActiveContext::Queues.all_queued_items
```
## Contributing
### Development guidelines

View File

@ -29,6 +29,7 @@ Gem::Specification.new do |spec|
spec.add_development_dependency 'aws-sdk-core'
spec.add_development_dependency 'faraday_middleware-aws-sigv4'
spec.add_development_dependency 'gitlab-styles'
spec.add_development_dependency 'redis'
spec.add_development_dependency 'rspec-rails'
spec.add_development_dependency 'rubocop-rspec'
spec.add_development_dependency 'webmock'

View File

@ -28,4 +28,8 @@ module ActiveContext
def self.raw_queues
ActiveContext::Queues.raw_queues
end
# Convenience entry point: enqueues references for the given objects via the
# collection class, optionally overriding the target queue.
# Delegates to ActiveContext::Tracker.track! and returns its result.
def self.track!(*objects, collection:, queue: nil)
ActiveContext::Tracker.track!(*objects, collection: collection, queue: queue)
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
module ActiveContext
module Concerns
# Mixin for collection classes that map domain objects to queue references.
#
# Including classes must implement:
# - `self.queue`  — the queue class used to track this collection's references
# - `#references` — the references for the wrapped object
module Collection
extend ActiveSupport::Concern
class_methods do
# Enqueues references for the given objects onto this collection's queue.
# Delegates to ActiveContext::Tracker with `self` as the collection.
def track!(*objects)
ActiveContext::Tracker.track!(objects, collection: self)
end
# The queue class associated with this collection; must be overridden.
def queue
raise NotImplementedError
end
end
# The domain object wrapped by this collection instance.
attr_reader :object
def initialize(object)
@object = object
end
# Returns the references for `object`; must be overridden by includers.
def references
raise NotImplementedError
end
end
end
end

View File

@ -9,6 +9,9 @@ module ActiveContext
end
module ClassMethods
SLICE_SIZE = 1_000
SHARD_LIMIT = 1_000
# Number of Redis shards backing this queue; must be overridden by includers.
def number_of_shards
raise NotImplementedError
end
@ -17,10 +20,84 @@ module ActiveContext
ActiveContext::Queues.register!(redis_key, shards: number_of_shards)
end
# Enqueues references, distributing them across shards. Each shard keeps a
# monotonically increasing score counter, so members added to a shard's
# sorted set are totally ordered by insertion.
def push(references)
refs_by_shard = references.group_by { |ref| ActiveContext::Shard.shard_number(number_of_shards, ref) }
ActiveContext::Redis.with_redis do |redis|
refs_by_shard.each do |shard_number, shard_items|
set_key = redis_set_key(shard_number)
# Reserve a contiguous score range [min, max] for this batch via INCRBY.
max = redis.incrby(redis_score_key(shard_number), shard_items.size)
min = (max - shard_items.size) + 1
# ZADD in slices of SLICE_SIZE to bound the size of each Redis command.
(min..max).zip(shard_items).each_slice(SLICE_SIZE) do |group|
redis.zadd(set_key, group)
end
end
end
end
# Total number of queued items, summed across every shard of this queue.
def queue_size
ActiveContext::Redis.with_redis do |redis|
queue_shards.map { |shard| redis.zcard(redis_set_key(shard)) }.sum
end
end
# Returns a hash of shard number => scored items, omitting empty shards.
def queued_items
{}.tap do |hash|
ActiveContext::Redis.with_redis do |redis|
each_queued_items_by_shard(redis) do |shard_number, specs|
hash[shard_number] = specs unless specs.empty?
end
end
end
end
# Yields [shard_number, scored_items] for each requested shard, reading at
# most `shard_limit` items per shard. `shards` is intersected with this
# queue's shard numbers, so out-of-range values are silently ignored.
def each_queued_items_by_shard(redis, shards: queue_shards)
(shards & queue_shards).each do |shard_number|
set_key = redis_set_key(shard_number)
specs = redis.zrangebyscore(set_key, '-inf', '+inf', limit: [0, shard_limit], with_scores: true)
yield shard_number, specs
end
end
# Deletes every shard's sorted set and score counter for this queue.
# Uses UNLINK (asynchronous delete); in cluster mode it explicitly permits
# cross-slot access and delegates batching to ClusterUtil.batch_unlink.
def clear_tracking!
ActiveContext::Redis.with_redis do |redis|
::Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
keys = queue_shards.map { |m| [redis_set_key(m), redis_score_key(m)] }.flatten # rubocop:disable Performance/FlatMap -- more than one level
if ::Gitlab::Redis::ClusterUtil.cluster?(redis)
::Gitlab::Redis::ClusterUtil.batch_unlink(keys, redis)
else
redis.unlink(*keys)
end
end
end
end
# Shard numbers for this queue: 0 through number_of_shards - 1.
def queue_shards
(0...number_of_shards).to_a
end
# Maximum number of items fetched per shard when reading queued items.
def shard_limit
SHARD_LIMIT
end
# Base Redis key for this queue. The braces form a Redis Cluster hash tag,
# so all of a queue's derived keys map to the same slot — presumably to
# support multi-key operations in cluster mode; confirm against deployment.
def redis_key
"#{prefix}:{#{queue_name}}"
end
# Key of the sorted set holding the given shard's queued references.
def redis_set_key(shard_number)
[redis_key, shard_number, 'zset'].join(':')
end
# Key of the counter used to reserve monotonically increasing scores
# for the given shard's sorted set.
def redis_score_key(shard_number)
[redis_key, shard_number, 'score'].join(':')
end
# Queue name derived from the last segment of the class name, snake_cased.
# NOTE: relies on `name_elements` (defined elsewhere) and ActiveSupport's
# String#underscore.
def queue_name
name_elements[-1].underscore
end

View File

@ -22,5 +22,17 @@ module ActiveContext
@raw_queues << "#{key}:#{shard}"
end
end
# Returns a hash of sorted-set key => queued references for every registered
# raw queue that currently has items.
def self.all_queued_items
{}.tap do |hash|
@raw_queues&.each do |queue_key|
references = ActiveContext::Redis.with_redis do |redis|
# NOTE(review): this reassigns the outer block variable, so the hash below
# is keyed by the ":zset"-suffixed key rather than the raw queue key —
# confirm that keying by the set key is intended.
queue_key = "#{queue_key}:zset"
redis.zrangebyscore(queue_key, '-inf', '+inf')
end
hash[queue_key] = references if references.present?
end
end
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module ActiveContext
# Thin wrapper around the Redis connection pool used by ActiveContext.
class Redis
# Yields a Redis connection from the GitLab shared-state pool to the block.
def self.with_redis(&block)
Gitlab::Redis::SharedState.with(&block)
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module ActiveContext
# Deterministically maps data onto one of `number_of_shards` shards.
class Shard
# Hashes `data` with SHA-256 and reduces it modulo the shard count,
# returning a shard number in [0, number_of_shards).
# SHA-256 is chosen for even key distribution, not for security.
def self.shard_number(number_of_shards, data)
digest = Digest::SHA256.hexdigest(data) # rubocop: disable Fips/OpenSSL -- used for data distribution, not for security
Integer(digest, 16) % number_of_shards
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true

module ActiveContext
  # Collects references for domain objects and enqueues them for processing.
  class Tracker
    class << self
      # Enqueues references for the given objects.
      #
      # objects    - one or more objects (nested arrays are flattened)
      # collection - class that wraps each object and supplies its references
      # queue      - optional queue override; defaults to collection.queue
      #
      # Returns the number of references enqueued (0 when there are none,
      # in which case nothing is pushed).
      def track!(*objects, collection:, queue: nil)
        refs = collect_references(objects.flatten, collection)

        return 0 if refs.empty?

        (queue || collection.queue).push(refs)

        refs.count
      end

      private

      # Maps each object to its references via the collection wrapper class.
      def collect_references(objects, collection)
        objects.flat_map { |object| collection.new(object).references }
      end
    end
  end
end

View File

@ -0,0 +1,126 @@
# frozen_string_literal: true
# Unit specs for ActiveContext::Concerns::Queue — the Redis-backed,
# sharded queue concern. All Redis traffic is stubbed through a double.
RSpec.describe ActiveContext::Concerns::Queue do
# Anonymous two-shard queue class; including the concern registers the queue.
let(:mock_queue_class) do
Class.new do
def self.name
'MockModule::TestQueue'
end
def self.number_of_shards
2
end
include ActiveContext::Concerns::Queue
end
end
# Every ActiveContext::Redis.with_redis call yields this double.
let(:redis_double) { instance_double(Redis) }
before do
clear_all_queues!
allow(ActiveContext::Redis).to receive(:with_redis).and_yield(redis_double)
end
describe '.register!' do
it 'registers the queue with ActiveContext::Queues' do
expect(ActiveContext::Queues).to receive(:register!).with('mockmodule:{test_queue}', shards: 2)
# Referencing the lazy `let` defines the class, which triggers register!.
mock_queue_class
end
end
describe '.push' do
it 'pushes references to Redis' do
references = %w[ref1 ref2 ref3]
allow(ActiveContext::Shard).to receive(:shard_number).and_return(0, 1, 0)
# INCRBY reserves per-shard score ranges; ZADD stores the scored members.
expect(redis_double).to receive(:incrby).with('mockmodule:{test_queue}:0:score', 2).and_return(2)
expect(redis_double).to receive(:incrby).with('mockmodule:{test_queue}:1:score', 1).and_return(1)
expect(redis_double).to receive(:zadd).with('mockmodule:{test_queue}:0:zset', [[1, 'ref1'], [2, 'ref3']])
expect(redis_double).to receive(:zadd).with('mockmodule:{test_queue}:1:zset', [[1, 'ref2']])
mock_queue_class.push(references)
end
end
describe '.queue_size' do
it 'returns the total size of all shards' do
expect(redis_double).to receive(:zcard).with('mockmodule:{test_queue}:0:zset').and_return(5)
expect(redis_double).to receive(:zcard).with('mockmodule:{test_queue}:1:zset').and_return(3)
expect(mock_queue_class.queue_size).to eq(8)
end
end
describe '.queued_items' do
it 'returns items from all non-empty shards' do
expect(redis_double).to receive(:zrangebyscore)
.with('mockmodule:{test_queue}:0:zset', '-inf', '+inf', limit: [0, anything], with_scores: true)
.and_return([['ref1', 1.0], ['ref2', 2.0]])
expect(redis_double).to receive(:zrangebyscore)
.with('mockmodule:{test_queue}:1:zset', '-inf', '+inf', limit: [0, anything], with_scores: true)
.and_return([])
# Empty shard 1 is omitted from the result hash.
expect(mock_queue_class.queued_items).to eq({
0 => [['ref1', 1.0], ['ref2', 2.0]]
})
end
end
describe '.clear_tracking!' do
# rubocop: disable RSpec/VerifiedDoubleReference -- stubbing GitLab logic
let(:redis_cluster_validator) { class_double("Gitlab::Instrumentation::RedisClusterValidator").as_stubbed_const }
let(:redis_cluster_util) { class_double("Gitlab::Redis::ClusterUtil").as_stubbed_const }
# rubocop: enable RSpec/VerifiedDoubleReference
before do
allow(redis_cluster_validator).to receive(:allow_cross_slot_commands).and_yield
end
context 'when Redis is not in cluster mode' do
before do
allow(redis_cluster_util).to receive(:cluster?).and_return(false)
end
it 'calls unlink directly on redis' do
expect(redis_double).to receive(:unlink)
.with(
'mockmodule:{test_queue}:0:zset', 'mockmodule:{test_queue}:0:score',
'mockmodule:{test_queue}:1:zset', 'mockmodule:{test_queue}:1:score'
)
mock_queue_class.clear_tracking!
end
end
context 'when Redis is in cluster mode' do
before do
allow(redis_cluster_util).to receive(:cluster?).and_return(true)
end
it 'calls batch_unlink on ClusterUtil' do
expect(redis_cluster_util).to receive(:batch_unlink)
.with(
[
'mockmodule:{test_queue}:0:zset', 'mockmodule:{test_queue}:0:score',
'mockmodule:{test_queue}:1:zset', 'mockmodule:{test_queue}:1:score'
],
redis_double
)
mock_queue_class.clear_tracking!
end
end
end
describe '.redis_key' do
it 'returns the correct Redis key' do
expect(mock_queue_class.redis_key).to eq('mockmodule:{test_queue}')
end
end
# Resets global queue-registration state so examples do not leak into
# each other via ActiveContext::Queues' module-level state.
def clear_all_queues!
ActiveContext::Queues.instance_variable_set(:@queues, Set.new)
ActiveContext::Queues.instance_variable_set(:@raw_queues, [])
end
end

View File

@ -0,0 +1,91 @@
# frozen_string_literal: true
# Unit specs for ActiveContext::Tracker.track! — reference collection,
# flattening, queue selection, and the private collect_references helper.
RSpec.describe ActiveContext::Tracker do
# Collection double whose queue is a plain array, so pushes can be asserted
# without Redis.
let(:mock_collection) do
Class.new do
include ActiveContext::Concerns::Collection
def self.queue
@queue ||= []
end
def references
["ref_#{object}"]
end
end
end
# Stand-in queue used to verify the `queue:` override.
let(:mock_queue) { [] }
describe '.track!' do
context 'with single object' do
it 'tracks references and returns count' do
result = described_class.track!('test', collection: mock_collection)
expect(result).to eq(1)
expect(mock_collection.queue).to contain_exactly(['ref_test'])
end
end
context 'with multiple objects' do
it 'tracks references for all objects and returns total count' do
result = described_class.track!('test1', 'test2', collection: mock_collection)
expect(result).to eq(2)
expect(mock_collection.queue).to contain_exactly(%w[ref_test1 ref_test2])
end
end
context 'with nested arrays' do
it 'flattens arrays and tracks all references' do
result = described_class.track!(['test1', %w[test2 test3]], collection: mock_collection)
expect(result).to eq(3)
expect(mock_collection.queue).to contain_exactly(%w[ref_test1 ref_test2 ref_test3])
end
end
context 'with empty input' do
it 'returns zero and does not modify queue' do
result = described_class.track!([], collection: mock_collection)
expect(result).to eq(0)
expect(mock_collection.queue).to be_empty
end
end
context 'with custom queue' do
it 'uses provided queue instead of collection queue' do
result = described_class.track!('test', collection: mock_collection, queue: mock_queue)
expect(result).to eq(1)
expect(mock_queue).to contain_exactly(['ref_test'])
expect(mock_collection.queue).to be_empty
end
end
context 'when collection does not implement queue method' do
# Omits `self.queue`, so the concern's default raises NotImplementedError.
let(:invalid_collection) do
Class.new do
include ActiveContext::Concerns::Collection
def references
["ref"]
end
end
end
it 'raises NotImplementedError' do
expect do
described_class.track!('test', collection: invalid_collection)
end.to raise_error(NotImplementedError)
end
end
end
describe '.collect_references' do
it 'is a private method' do
expect(described_class.private_methods).to include(:collect_references)
end
end
end

View File

@ -5,6 +5,8 @@ require 'logger'
require 'elasticsearch'
require 'opensearch'
require 'aws-sdk-core'
require 'active_support/concern'
require 'redis'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure

View File

@ -2413,7 +2413,7 @@ msgstr ""
msgid "AI|Write a summary to fill out the selected issue template"
msgstr ""
msgid "AI|Your request does not seem to contain code to %{action}. To %{human_name} select the lines of code in your %{platform} and then type the command %{command_name} in the chat. You may add additional instructions after this command. If you have no code to select, you can also simply add the code after the command."
msgid "AI|Your request does not seem to contain code to %{action}. To %{human_name} select the lines of code in your editor and then type the command %{command_name} in the chat. You may add additional instructions after this command. If you have no code to select, you can also simply add the code after the command."
msgstr ""
msgid "API"
@ -36666,7 +36666,7 @@ msgstr ""
msgid "Multiple Prometheus integrations are not supported"
msgstr ""
msgid "Multiple components '%{name}' have 'gl/inject-editor' attribute"
msgid "Multiple components '%{name}' have '%{attribute}' attribute"
msgstr ""
msgid "Multiple integrations of a single type are not supported for this project"
@ -37548,7 +37548,7 @@ msgstr ""
msgid "No committers"
msgstr ""
msgid "No component has 'gl/inject-editor' attribute"
msgid "No component has '%{attribute}' attribute"
msgstr ""
msgid "No components present in devfile"
@ -62764,6 +62764,9 @@ msgstr ""
msgid "Viewing commit"
msgstr ""
msgid "Violates security policy"
msgstr ""
msgid "Violation"
msgstr ""

View File

@ -66,6 +66,7 @@
"@gitlab/vue-router-vue3": "npm:vue-router@4.1.6",
"@gitlab/vuex-vue3": "npm:vuex@4.0.0",
"@gitlab/web-ide": "^0.0.1-dev-20250109231656",
"@gleam-lang/highlight.js-gleam": "^1.5.0",
"@mattiasbuelens/web-streams-adapter": "^0.1.0",
"@rails/actioncable": "7.0.807",
"@rails/ujs": "7.0.807",

View File

@ -248,6 +248,7 @@ spec/frontend/releases/components/app_edit_new_spec.js
spec/frontend/releases/components/asset_links_form_spec.js
spec/frontend/releases/components/tag_field_exsting_spec.js
spec/frontend/repository/components/header_area/blob_controls_spec.js
spec/frontend/repository/components/header_area/blob_overflow_menu_spec.js
spec/frontend/repository/components/table/index_spec.js
spec/frontend/repository/components/table/row_spec.js
spec/frontend/repository/router_spec.js

View File

@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'open3'
require 'yaml'
class MigrationSchemaValidator
FILENAME = 'db/structure.sql'
@ -9,10 +10,33 @@ class MigrationSchemaValidator
SCHEMA_VERSION_DIR = 'db/schema_migrations'
MODELS_DIR = 'app/models'
EE_MODELS_DIR = 'ee/app/models'
DB_DOCS_DIR = 'db/docs'
DOC_URL = "https://docs.gitlab.com/ee/development/database/avoiding_downtime_in_migrations.html"
VERSION_DIGITS = 14
SKIP_VALIDATION_LABEL = 'pipeline:skip-check-migrations'
MIGRATION_METHODS = %w[
cleanup_concurrent_column_rename
cleanup_conversion_of_integer_to_bigint
rename_column
rename_column_concurrently
remove_column
].freeze
MIGRATION_METHODS_REGEX = /(#{MIGRATION_METHODS.join('|')})[(\s]/
TABLE_AND_COLUMN_NAME_REGEX = /(?:#{MIGRATION_METHODS.join('|')})\s+:(\w+),\s+:(\w+)/
UP_OR_CHANGE_METHOD_REGEX = /def (?:up|change)(.*?)end/m
PERMITTED_YAML_CLASSES = [String, Array, Hash].freeze
def initialize
# Accumulates `model name => [columns]` for columns that a migration drops
# or renames but that are not `ignore_column`-ed in the model.
@models_missing_ignore = Hash.new { |h, k| h[k] = [] }
end
def validate!
if committed_migrations.empty?
puts "\e[32m No migrations found, skipping schema validation\e[0m"
@ -24,6 +48,9 @@ class MigrationSchemaValidator
return
end
# TODO: validate_ignore_columns! should never be skipped, the ignore_column directive must always be present
# issue: https://gitlab.com/gitlab-org/gitlab/-/issues/512680
validate_ignore_columns!
validate_schema_on_rollback!
validate_schema_on_migrate!
validate_schema_version_files!
@ -35,6 +62,86 @@ class MigrationSchemaValidator
ENV.fetch('CI_MERGE_REQUEST_LABELS', '').split(',').include?(SKIP_VALIDATION_LABEL)
end
# Fails the check (via `die`) when a committed migration drops or renames
# columns that are not ignored in the corresponding model.
def validate_ignore_columns!
base_message = <<~MSG.freeze
Column operations, like dropping, renaming or primary key conversion, require columns to be ignored in
the model. This step is necessary because Rails caches the columns and re-uses it in various places across the
application. Refer to these pages for more information:
#{DOC_URL}#dropping-columns
#{DOC_URL}#renaming-columns
#{DOC_URL}#migrating-integer-primary-keys-to-bigint
Please ensure that columns are properly ignored in the models
MSG
# check_file populates @models_missing_ignore as a side effect.
committed_migrations.each do |file_name|
check_file(file_name)
end
return if @models_missing_ignore.empty?
models_missing_text = @models_missing_ignore.map { |key, values| "#{key}: #{values.join(', ')}" }.join("\n")
die "#{base_message}\n#{models_missing_text}"
end
# Returns the stripped body of the first `def up`/`def change` method found
# in +file_content+, or nil when neither method is present.
def extract_up_or_change_method(file_content)
match = UP_OR_CHANGE_METHOD_REGEX.match(file_content)
return nil if match.nil?
match[1].strip
end
# Scans one migration file for column drop/rename operations and records, in
# @models_missing_ignore, every affected model that does not ignore the column.
def check_file(file_path)
return unless File.exist?(file_path)
file_content = File.read(file_path)
method_content = extract_up_or_change_method(file_content)
# Only `up`/`change` bodies that invoke one of MIGRATION_METHODS matter.
return unless method_content&.match?(MIGRATION_METHODS_REGEX)
table_column_pairs = method_content.scan(TABLE_AND_COLUMN_NAME_REGEX)
table_column_pairs.each do |table, column|
model_name = model(table)
next unless model_name
model_file_path = model_path(model_name)
next unless File.exist?(model_file_path)
model_content = File.read(model_file_path)
# Accept both `ignore_column :col` and `ignore_columns %i[col ...]` forms.
next if model_content.match?(/\s(ignore_column|ignore_columns)\s(:|%i\[)\s*#{column}/)
@models_missing_ignore[model_name.to_s] << column
end
end
# Resolves a table name to its primary model class name using the db/docs
# metadata file. Returns nil when the doc file does not exist or when the
# YAML contains a disallowed class (logged, not raised).
def model(table_name)
db_docs_file = File.join(DB_DOCS_DIR, "#{table_name}.yml")
return unless File.exist?(db_docs_file)
data = YAML.safe_load(File.read(db_docs_file), permitted_classes: PERMITTED_YAML_CLASSES)
data['classes'].first
rescue Psych::DisallowedClass => e
puts "Error: Unexpected object type in YAML file for table '#{table_name}': #{e.message}"
nil
end
# Returns the path to the model's source file, preferring the CE models
# directory and falling back to the EE directory when the CE file is absent.
def model_path(model)
relative = "#{underscore(model)}.rb"
ce_path = File.join(MODELS_DIR, relative)
return ce_path if File.exist?(ce_path)
File.join(EE_MODELS_DIR, relative)
end
# Converts a CamelCase (optionally namespaced) constant name into its
# snake_case path form, e.g. "Foo::BarBaz" => "foo/bar_baz".
def underscore(str)
word = str.gsub('::', '/')
word = word.gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
word = word.gsub(/([a-z\d])([A-Z])/, '\1_\2')
word.tr('-', '_').downcase
end
def validate_schema_on_rollback!
committed_migrations.reverse_each do |filename|
version = find_migration_version(filename)

View File

@ -29,6 +29,10 @@ RSpec.describe 'File blob', :js, feature_category: :source_code_management do
).execute
end
before do
stub_feature_flags(blob_overflow_menu: false)
end
context 'Ruby file' do
before do
visit_blob('files/ruby/popen.rb')

View File

@ -17,6 +17,8 @@ RSpec.describe "User browses files", :js, feature_category: :source_code_managem
before do
sign_in(user)
stub_feature_flags(blob_overflow_menu: false)
end
it "shows last commit for current directory", :js do

View File

@ -41,6 +41,8 @@ RSpec.describe 'View on environment', :js, feature_category: :groups_and_project
file_path: file_path,
file_content: '# Noop'
).execute
stub_feature_flags(blob_overflow_menu: false)
end
context 'and an active deployment' do

View File

@ -8,7 +8,7 @@ import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { UPDATE_COMMENT_FORM } from '~/notes/i18n';
import { createTestPiniaAction, createCustomGetters } from 'helpers/pinia_helpers';
import { globalAccessorPlugin } from '~/pinia';
import { globalAccessorPlugin } from '~/pinia/plugins';
import { useBatchComments } from '~/batch_comments/store';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { useNotes } from '~/notes/store/legacy_notes';

View File

@ -22,6 +22,7 @@ describe('Blob Header Default Actions', () => {
const defaultProvide = {
blobHash: 'foo-bar',
glFeatures: { blobOverflowMenu: true },
};
const findDefaultActions = () => wrapper.findComponent(DefaultActions);
@ -126,6 +127,12 @@ describe('Blob Header Default Actions', () => {
});
});
it('does not render DefaultActions when on blob page', () => {
createComponent({ propsData: { isBlobPage: true } });
expect(findDefaultActions().exists()).toBe(false);
});
it.each([[{ showBlameToggle: true }], [{ showBlameToggle: false }]])(
'passes the `showBlameToggle` prop to the viewer switcher',
(propsData) => {
@ -153,15 +160,6 @@ describe('Blob Header Default Actions', () => {
expect(findViewSwitcher().exists()).toBe(false);
});
it('does not render default actions is corresponding prop is passed', () => {
createComponent({
propsData: {
hideDefaultActions: true,
},
});
expect(findDefaultActions().exists()).toBe(false);
});
it.each`
slotContent | key
${'Foo Prepend'} | ${'prepend'}

View File

@ -40,7 +40,7 @@ import { handleLocationHash, historyPushState, scrollToElement } from '~/lib/uti
import setWindowLocation from 'helpers/set_window_location_helper';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import { useNotes } from '~/notes/store/legacy_notes';
import { globalAccessorPlugin } from '~/pinia';
import { globalAccessorPlugin } from '~/pinia/plugins';
import { diffMetadata } from '../../mock_data/diff_metadata';
jest.mock('~/alert');

View File

@ -12,7 +12,7 @@ import setWindowLocation from 'helpers/set_window_location_helper';
import { createCustomGetters } from 'helpers/pinia_helpers';
import { useMrNotes } from '~/mr_notes/store/legacy_mr_notes';
import { useNotes } from '~/notes/store/legacy_notes';
import { globalAccessorPlugin } from '~/pinia';
import { globalAccessorPlugin } from '~/pinia/plugins';
import discussion from '../../mock_data/diff_discussions';
import diffsMockData from '../../mock_data/merge_request_diffs';

View File

@ -26,7 +26,7 @@ import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { useNotes } from '~/notes/store/legacy_notes';
import { createCustomGetters, createTestPiniaAction } from 'helpers/pinia_helpers';
import { useBatchComments } from '~/batch_comments/store';
import { globalAccessorPlugin } from '~/pinia';
import { globalAccessorPlugin } from '~/pinia/plugins';
import {
discussionMock,
notesDataMock,

View File

@ -4,7 +4,7 @@ import { DESC, ASC, NOTEABLE_TYPE_MAPPING } from '~/notes/constants';
import { createCustomGetters } from 'helpers/pinia_helpers';
import { useNotes } from '~/notes/store/legacy_notes';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { globalAccessorPlugin } from '~/pinia';
import { globalAccessorPlugin } from '~/pinia/plugins';
import { useBatchComments } from '~/batch_comments/store';
import {
notesDataMock,

View File

@ -2,12 +2,12 @@
import Vuex from 'vuex';
import Vue from 'vue';
import { createPinia, defineStore, setActivePinia } from 'pinia';
import { syncWithVuex } from '~/pinia';
import { syncWithVuex } from '~/pinia/plugins';
import waitForPromises from 'helpers/wait_for_promises';
Vue.use(Vuex);
describe('Pinia helpers', () => {
describe('Pinia plugins', () => {
describe('syncWithVuex', () => {
let vuexStore;
let usePiniaStore;

View File

@ -1,6 +1,5 @@
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BlobControls from '~/repository/components/header_area/blob_controls.vue';
@ -13,6 +12,7 @@ import { resetShortcutsForTests } from '~/behaviors/shortcuts';
import ShortcutsBlob from '~/behaviors/shortcuts/shortcuts_blob';
import Shortcuts from '~/behaviors/shortcuts/shortcuts';
import BlobLinePermalinkUpdater from '~/blob/blob_line_permalink_updater';
import OverflowMenu from '~/repository/components/header_area/blob_overflow_menu.vue';
import { blobControlsDataMock, refMock } from '../../mock_data';
jest.mock('~/repository/utils/dom');
@ -23,24 +23,45 @@ let router;
let wrapper;
let mockResolver;
const createComponent = async () => {
const createComponent = async (
props = {},
blobInfoOverrides = {},
glFeatures = { blobOverflowMenu: false },
) => {
Vue.use(VueApollo);
const project = { ...blobControlsDataMock };
const projectPath = 'some/project';
router = createRouter(projectPath, refMock);
router.replace({ name: 'blobPath', params: { path: '/some/file.js' } });
mockResolver = jest.fn().mockResolvedValue({ data: { project } });
mockResolver = jest.fn().mockResolvedValue({
data: {
project: {
id: '1234',
repository: {
blobs: {
nodes: [{ ...blobControlsDataMock.repository.blobs.nodes[0], ...blobInfoOverrides }],
},
},
},
},
});
await resetShortcutsForTests();
wrapper = shallowMountExtended(BlobControls, {
router,
apolloProvider: createMockApollo([[blobControlsQuery, mockResolver]]),
propsData: { projectPath },
provide: {
glFeatures,
},
propsData: {
projectPath,
isBinary: false,
refType: 'heads',
...props,
},
mixins: [{ data: () => ({ ref: refMock }) }],
});
@ -51,29 +72,56 @@ describe('Blob controls component', () => {
const findFindButton = () => wrapper.findByTestId('find');
const findBlameButton = () => wrapper.findByTestId('blame');
const findPermalinkButton = () => wrapper.findByTestId('permalink');
const findOverflowMenu = () => wrapper.findComponent(OverflowMenu);
const { bindInternalEventDocument } = useMockInternalEventsTracking();
beforeEach(() => createComponent());
it('triggers a `focusSearchFile` shortcut when the findFile button is clicked', () => {
const findFileButton = findFindButton();
jest.spyOn(Shortcuts, 'focusSearchFile').mockResolvedValue();
findFileButton.vm.$emit('click');
describe('FindFile button', () => {
it('renders FindFile button', () => {
expect(findFindButton().exists()).toBe(true);
});
expect(Shortcuts.focusSearchFile).toHaveBeenCalled();
it('triggers a `focusSearchFile` shortcut when the findFile button is clicked', () => {
const findFileButton = findFindButton();
jest.spyOn(Shortcuts, 'focusSearchFile').mockResolvedValue();
findFileButton.vm.$emit('click');
expect(Shortcuts.focusSearchFile).toHaveBeenCalled();
});
it('emits a tracking event when the Find file button is clicked', () => {
const { trackEventSpy } = bindInternalEventDocument(wrapper.element);
jest.spyOn(Shortcuts, 'focusSearchFile').mockResolvedValue();
findFindButton().vm.$emit('click');
expect(trackEventSpy).toHaveBeenCalledWith('click_find_file_button_on_repository_pages');
});
});
it('emits a tracking event when the Find file button is clicked', () => {
const { trackEventSpy } = bindInternalEventDocument(wrapper.element);
jest.spyOn(Shortcuts, 'focusSearchFile').mockResolvedValue();
describe('Blame button', () => {
it('renders a blame button with the correct href', () => {
expect(findBlameButton().attributes('href')).toBe('blame/file.js');
});
findFindButton().vm.$emit('click');
it('does not render blame button when blobInfo.storedExternally is true', async () => {
await createComponent({}, { storedExternally: true });
expect(trackEventSpy).toHaveBeenCalledWith('click_find_file_button_on_repository_pages');
});
expect(findBlameButton().exists()).toBe(false);
});
it('renders a blame button with the correct href', () => {
expect(findBlameButton().attributes('href')).toBe('blame/file.js');
it('does not render blame button when blobInfo.externalStorage is "lfs"', async () => {
await createComponent({}, { externalStorage: 'lfs' });
expect(findBlameButton().exists()).toBe(false);
});
it('renders blame button when blobInfo.storedExternally is false and externalStorage is not "lfs"', async () => {
await createComponent({}, { storedExternally: false, externalStorage: null });
expect(findBlameButton().exists()).toBe(true);
});
});
it('renders a permalink button with the correct href', () => {
@ -105,4 +153,56 @@ describe('Blob controls component', () => {
it('loads the BlobLinePermalinkUpdater', () => {
expect(BlobLinePermalinkUpdater).toHaveBeenCalled();
});
describe('BlobOverflow dropdown', () => {
it('renders BlobOverflow component with correct props', async () => {
await createComponent({}, {}, { blobOverflowMenu: true });
expect(findOverflowMenu().exists()).toBe(true);
expect(findOverflowMenu().props()).toEqual({
rawPath: 'https://testing.com/flightjs/flight/snippets/51/raw',
isBinary: true,
environmentName: '',
environmentPath: '',
isEmpty: false,
overrideCopy: true,
simpleViewer: {
renderError: null,
tooLarge: false,
type: 'simple',
fileType: 'rich',
},
richViewer: {
renderError: 'too big file',
tooLarge: false,
type: 'rich',
fileType: 'rich',
},
});
});
it('passes the correct isBinary value to BlobOverflow when viewing a binary file', async () => {
await createComponent(
{ isBinary: true },
{
simpleViewer: {
...blobControlsDataMock.repository.blobs.nodes[0].simpleViewer,
fileType: 'podfile',
},
},
{ blobOverflowMenu: true },
);
expect(findOverflowMenu().props('isBinary')).toBe(true);
});
it('copies to clipboard raw blob text, when receives copy event', async () => {
await createComponent({}, {}, { blobOverflowMenu: true });
jest.spyOn(navigator.clipboard, 'writeText');
findOverflowMenu().vm.$emit('copy');
expect(navigator.clipboard.writeText).toHaveBeenCalledWith('Example raw text content');
});
});
});

View File

@ -1,6 +1,8 @@
import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import BlobOverflowMenu from '~/repository/components/header_area/blob_overflow_menu.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BlobOverflowMenu from '~/repository/components/header_area/blob_overflow_menu.vue';
import createRouter from '~/repository/router';
import { refMock } from '../../mock_data';
const Blob = {
binary: false,
@ -36,16 +38,25 @@ const mockEnvironmentPath = 'https://my.testing.environment';
describe('Blob Overflow Menu', () => {
let wrapper;
const projectPath = '/some/project';
const router = createRouter(projectPath, refMock);
router.replace({ name: 'blobPath', params: { path: '/some/file.js' } });
const blobHash = 'foo-bar';
function createComponent(propsData = {}, provided = {}) {
wrapper = shallowMountExtended(BlobOverflowMenu, {
router,
provide: {
blobHash,
...provided,
},
propsData: {
rawPath: Blob.rawPath,
richViewer: Blob.richViewer,
simpleViewer: Blob.simpleViewer,
isBinary: false,
...propsData,
},
stub: {
@ -83,9 +94,12 @@ describe('Blob Overflow Menu', () => {
});
it('renders "Copy file contents" button as disabled if the viewer is Rich', () => {
createComponent({
activeViewer: 'rich',
});
// Create rich viewer element in DOM
const richViewer = document.createElement('div');
richViewer.className = 'blob-viewer';
richViewer.dataset.type = 'rich';
document.body.appendChild(richViewer);
createComponent();
expect(findCopyFileContentItem().props('item')).toMatchObject({
extraAttrs: { disabled: true },
@ -94,7 +108,10 @@ describe('Blob Overflow Menu', () => {
it('does not render the copy button if a rendering error is set', () => {
createComponent({
hasRenderError: true,
richViewer: {
...Blob.richViewer,
renderError: 'File too big',
},
});
expect(findDropdownItemWithText('Copy file contents')).toBeUndefined();

View File

@ -29,9 +29,16 @@ describe('HighlightMixin', () => {
const contentArray = Array.from({ length: 140 }, () => 'newline'); // simulate 140 lines of code
const rawTextBlob = contentArray.join('\n');
const languageMock = 'json';
const nameMock = 'test.json';
const createComponent = (
{ fileType = TEXT_FILE_TYPE, language = languageMock, externalStorageUrl, rawPath } = {},
{
fileType = TEXT_FILE_TYPE,
language = languageMock,
name = nameMock,
externalStorageUrl,
rawPath,
} = {},
isUsingLfs = false,
) => {
const simpleViewer = { fileType };
@ -49,7 +56,7 @@ describe('HighlightMixin', () => {
</div>`,
created() {
this.initHighlightWorker(
{ rawTextBlob, simpleViewer, language, fileType, externalStorageUrl, rawPath },
{ rawTextBlob, simpleViewer, language, name, fileType, externalStorageUrl, rawPath },
isUsingLfs,
);
},
@ -161,4 +168,16 @@ describe('HighlightMixin', () => {
expect(workerMock.postMessage).toHaveBeenCalledWith(mockParams);
});
});
// Verifies the mixin's language override for Gleam: highlight.js cannot
// always detect Gleam, so the worker payload should force `language: 'gleam'`
// whenever the file name ends in `.gleam`, even if the caller passed a
// different language (here 'plaintext').
describe('Gleam language handling', () => {
// reset call history so mock.calls[0] below is this test's first postMessage
beforeEach(() => workerMock.postMessage.mockClear());
it('sets language to gleam for .gleam files regardless of passed language', () => {
createComponent({ language: 'plaintext', name: 'test.gleam' });
// first message sent to the highlight worker carries the resolved language
expect(workerMock.postMessage.mock.calls[0][0]).toMatchObject({
language: 'gleam',
});
});
});
});

View File

@ -98,7 +98,27 @@ export const blobControlsDataMock = {
blamePath: 'blame/file.js',
permalinkPath: 'permalink/file.js',
storedExternally: false,
externalStorage: '',
externalStorage: 'https://external-storage',
environmentFormattedExternalUrl: '',
environmentExternalUrlForRouteMap: '',
rawPath: 'https://testing.com/flightjs/flight/snippets/51/raw',
rawTextBlob: 'Example raw text content',
simpleViewer: {
collapsed: false,
loadingPartialName: 'loading',
renderError: null,
tooLarge: false,
type: 'simple',
fileType: 'rich',
},
richViewer: {
collapsed: false,
loadingPartialName: 'loading',
renderError: 'too big file',
tooLarge: false,
type: 'rich',
fileType: 'rich',
},
},
],
},
@ -240,4 +260,5 @@ export const headerAppInjected = {
downloadArtifacts: [
'https://gitlab.com/example-group/example-project/-/jobs/artifacts/main/download?job=build',
],
isBinary: false,
};

View File

@ -596,6 +596,7 @@ RSpec.describe BlobHelper, feature_category: :source_code_management do
it 'returns data related to blob header' do
expect(helper.vue_blob_header_app_data(project, blob, ref)).to include({
blob_path: blob.path,
is_binary: blob.binary?,
breadcrumbs: breadcrumb_data,
escaped_ref: ref,
history_link: project_commits_path(project, ref),

View File

@ -25,18 +25,17 @@ RSpec.describe Gitlab::Database::NamespaceProjectIdsEachBatch, feature_category:
it 'returns the correct project IDs' do
expect(
described_class.new(group_id: group.id).execute
).to eq([project1.id, project2.id, project3.id, project4.id, project5.id])
).to match_array([project1.id, project2.id, project3.id, project4.id, project5.id])
end
context 'when passed an optional resolver' do
it 'returns the correct project IDs filtered by resolver',
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/497833' do
it 'returns the correct project IDs filtered by resolver' do
resolver = ->(batch) {
Project.where(id: batch).where(path: [project1.path, project2.path]).pluck_primary_key
}
expect(
described_class.new(group_id: group.id, resolver: resolver).execute
).to eq([project1.id, project2.id])
).to match_array([project1.id, project2.id])
end
end
end

View File

@ -0,0 +1,105 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the data-sync widget callback that copies design management data
# when a work item is moved or cloned to a target work item.
RSpec.describe WorkItems::DataSync::Widgets::Designs, feature_category: :team_planning do
let_it_be(:current_user) { create(:user) }
let_it_be(:work_item) { create(:work_item) }
# refind so per-example state changes (e.g. design_collection copy state) are isolated
let_it_be_with_refind(:target_work_item) { create(:work_item) }
let_it_be(:design) { create(:design, :with_versions, issue: work_item) }
subject(:callback) do
described_class.new(
work_item: work_item, target_work_item: target_work_item, current_user: current_user, params: {}
)
end
describe '#after_save_commit' do
subject(:after_save_commit) { callback.after_save_commit }
# Common negative assertion: the async copy worker must not be scheduled.
shared_examples 'does not copy designs' do
it 'does not call the worker' do
expect(DesignManagement::CopyDesignCollectionWorker).not_to receive(:perform_async)
after_save_commit
end
end
context 'when target work item does not have designs widget' do
before do
allow(target_work_item).to receive(:get_widget).with(:designs).and_return(false)
end
it_behaves_like 'does not copy designs'
end
context "when work_item does not have designs" do
before do
work_item.designs.delete_all
end
it_behaves_like 'does not copy designs'
end
context "when user does not have permissions to read designs" do
# NOTE(review): no permission stubs here, so the default (unauthorized) path applies
it "logs the error message" do
expect(::Gitlab::AppLogger).to receive(:error).with("User cannot copy designs to work item")
after_save_commit
end
it_behaves_like 'does not copy designs'
end
context "when user has permission to read designs", :clean_gitlab_redis_shared_state do
before do
allow(current_user).to receive(:can?).with(:read_design, work_item).and_return(true)
allow(current_user).to receive(:can?).with(:admin_issue, target_work_item).and_return(true)
end
context "when target design collection copy state is not ready" do
before do
# move the collection out of the `ready` state before the callback runs
target_work_item.design_collection.start_copy!
end
it "logs the error message" do
expect(::Gitlab::AppLogger).to receive(:error).with("Target design collection copy state must be `ready`")
after_save_commit
end
it_behaves_like 'does not copy designs'
end
context 'when target work item has designs widget' do
it 'calls the copy design collection worker' do
expect(DesignManagement::CopyDesignCollectionWorker).to receive(:perform_async).with(
current_user.id,
work_item.id,
target_work_item.id
)
after_save_commit
end
it 'sets the correct design collection copy state' do
expect { after_save_commit }.to change {
target_work_item.design_collection.copy_state
}.from('ready').to('in_progress')
end
end
end
end
describe '#post_move_cleanup' do
# 3 extra designs plus the top-level `design` fixture => 4 records total below
let_it_be(:designs) { create_list(:design, 3, :with_versions, issue: work_item) }
it "deletes the original work item design data" do
actions = DesignManagement::Action.where(design: work_item.designs)
expect { callback.post_move_cleanup }.to change { work_item.designs.count }.from(4).to(0)
.and change { work_item.design_versions.count }.from(4).to(0)
.and change { actions.reload.count }.from(4).to(0)
end
end
end

View File

@ -12,13 +12,3 @@ RSpec::Matchers.define :have_tracking do |action:, label: nil, property: nil, te
expect(rendered).to have_css(css)
end
end
# Matcher asserting that rendered HTML carries internal-event tracking data
# attributes (data-event-tracking, plus optional data-event-label / data-testid).
# `rendered` must be compatible with Capybara's have_css matcher.
RSpec::Matchers.define :have_internal_tracking do |event:, label: nil, testid: nil|
match do |rendered|
# build one compound CSS selector so all attributes must appear on the same element
css = "[data-event-tracking='#{event}']"
css += "[data-event-label='#{label}']" if label
css += "[data-testid='#{testid}']" if testid
expect(rendered).to have_css(css)
end
end

View File

@ -14,7 +14,7 @@
# Example:
# expect { subject }
# .to trigger_internal_events('web_ide_viewed')
# .with(user: user, project: project, namespace: namepsace)
# .with(user: user, project: project, namespace: namespace)
#
# -- #increment_usage_metrics -------
# Use: Asserts that one or more usage metric was incremented by the right value.
@ -96,6 +96,10 @@ end
RSpec::Matchers.define :trigger_internal_events do |*event_names|
include InternalEventsMatchHelpers
def supports_value_expectations?
true
end
description { "trigger the internal events: #{event_names.join(', ')}" }
failure_message { @failure_message }
@ -117,14 +121,62 @@ RSpec::Matchers.define :trigger_internal_events do |*event_names|
end
end
match do |proc|
@event_names = event_names.flatten
@properties ||= {}
@chained_methods ||= [[:once]]
chain(:on_click) { @on_click = true }
chain(:on_load) { @on_load = true }
match do |input|
setup_match_context(event_names)
check_if_params_provided!(:events, @event_names)
check_if_events_exist!(@event_names)
input.is_a?(Proc) ? expect_events_to_fire(input) : expect_data_attributes(input)
end
match_when_negated do |input|
setup_match_context(event_names)
check_if_events_exist!(@event_names)
input.is_a?(Proc) ? expect_no_events_to_fire(input) : expect_data_attributes(input, negate: true)
end
private
def setup_match_context(event_names)
@event_names = event_names.flatten
@properties ||= {}
end
def expect_no_events_to_fire(proc)
# rubocop:disable RSpec/ExpectGitlabTracking -- Supersedes the #expect_snowplow_event helper for internal events
allow(Gitlab::Tracking).to receive(:event).and_call_original
allow(Gitlab::InternalEvents).to receive(:track_event).and_call_original
# rubocop:enable RSpec/ExpectGitlabTracking
collect_expectations do |event_name|
[
expect_no_snowplow_event(event_name),
expect_no_internal_event(event_name)
]
end
proc.call
verify_expectations
true
rescue RSpec::Mocks::MockExpectationError => e
@failure_message = e.message
false
ensure
# prevent expectations from being satisfied outside of the block scope
unstub_expectations
end
def expect_events_to_fire(proc)
check_chain_methods_for_block!
@chained_methods ||= [[:once]]
allow(Gitlab::InternalEvents).to receive(:track_event).and_call_original
allow(Gitlab::Redis::HLL).to receive(:add).and_call_original
@ -149,37 +201,37 @@ RSpec::Matchers.define :trigger_internal_events do |*event_names|
unstub_expectations
end
match_when_negated do |proc|
@event_names = event_names.flatten
# All `node` inputs should be compatible with the have_css matcher
# https://www.rubydoc.info/gems/capybara/Capybara/RSpecMatchers#have_css-instance_method
def expect_data_attributes(node, negate: false)
# ensure assertions work for Capybara::Node::Simple inputs
node = node.native if node.respond_to?(:native)
check_if_events_exist!(@event_names)
check_negated_chain_methods_for_node! if negate
check_chain_methods_for_node!
check_negated_events_limit_for_node! if negate
check_events_limit_for_node!
# rubocop:disable RSpec/ExpectGitlabTracking -- Supersedes the #expect_snowplow_event helper for internal events
allow(Gitlab::Tracking).to receive(:event).and_call_original
allow(Gitlab::InternalEvents).to receive(:track_event).and_call_original
# rubocop:enable RSpec/ExpectGitlabTracking
collect_expectations do |event_name|
[
expect_no_snowplow_event(event_name),
expect_no_internal_event(event_name)
]
end
proc.call
verify_expectations
expect_data_attribute(node, 'tracking', @event_names.first)
expect_data_attribute(node, 'label', @additional_properties.try(:[], :label))
expect_data_attribute(node, 'property', @additional_properties.try(:[], :property))
expect_data_attribute(node, 'value', @additional_properties.try(:[], :value))
expect_data_attribute(node, 'tracking-load', @on_load)
true
rescue RSpec::Mocks::MockExpectationError => e
rescue RSpec::Expectations::ExpectationNotMetError => e
@failure_message = e.message
false
ensure
# prevent expectations from being satisfied outside of the block scope
unstub_expectations
end
private
# Keep this in sync with the constants in app/assets/javascripts/tracking/constants.js
def expect_data_attribute(node, attribute, value)
if value
expect(node).to have_css("[data-event-#{attribute}=\"#{value}\"]")
else
expect(node).not_to have_css("[data-event-#{attribute}]")
end
end
def receive_expected_count_of(message)
apply_chain_methods(receive(message), @chained_methods)
@ -302,6 +354,39 @@ RSpec::Matchers.define :trigger_internal_events do |*event_names|
doubled_module.expectations.pop
end
end
def check_chain_methods_for_block!
return unless instance_variable_defined?(:@on_load) || instance_variable_defined?(:@on_click)
raise ArgumentError, "Chain methods :on_click, :on_load are only available for Capybara::Node::Simple type " \
"arguments"
end
def check_events_limit_for_node!
return if @event_names.length <= 1
raise ArgumentError, "Providing multiple event names to #{name} is only supported for block arguments"
end
def check_negated_events_limit_for_node!
return if @event_names.none?
raise ArgumentError, "Negated #{name} matcher accepts no arguments or chain methods when testing data attributes"
end
def check_chain_methods_for_node!
return unless @chained_methods
raise ArgumentError, "Chain methods #{@chained_methods.map(&:first).join(',')} are only available for " \
"block arguments"
end
def check_negated_chain_methods_for_node!
return unless instance_variable_defined?(:@on_load) || instance_variable_defined?(:@on_click) || @properties.any?
raise ArgumentError, "Chain methods :on_click, :on_load, :with are unavailable for negated #{name} matcher with " \
"for Capybara::Node::Simple type arguments"
end
end
RSpec::Matchers.define :increment_usage_metrics do |*key_paths|

View File

@ -62,6 +62,8 @@ RSpec.shared_examples 'cloneable and moveable work item' do
end
RSpec.shared_examples 'cloneable and moveable widget data' do
include DesignManagementTestHelpers
def work_item_assignees(work_item)
work_item.reload.assignees
end
@ -94,6 +96,10 @@ RSpec.shared_examples 'cloneable and moveable widget data' do
work_item.reload.customer_relations_contacts
end
def work_item_designs(work_item)
work_item.reload.designs.pluck(:filename)
end
def work_item_labels(work_item)
work_item.reload.labels.pluck(:title)
end
@ -181,6 +187,13 @@ RSpec.shared_examples 'cloneable and moveable widget data' do
timelogs.pluck(:user_id, :time_spent)
end
let_it_be(:designs) do
designs = create_list(:design, 2, :with_lfs_file, issue: original_work_item)
# we need to create an owner for the group, as it is needed when we try to copy the designs to the new namespace
group.add_owner(create(:user))
designs.pluck(:filename)
end
let_it_be(:labels) do
labels = []
if original_work_item.namespace.is_a?(Group)
@ -223,6 +236,7 @@ RSpec.shared_examples 'cloneable and moveable widget data' do
{ widget_name: :sent_notifications, eval_value: :work_item_sent_notifications, expected_data: notifications, operations: [move] },
{ widget_name: :timelogs, eval_value: :work_item_timelogs, expected_data: timelogs, operations: [move] },
{ widget_name: :customer_relations_contacts, eval_value: :work_item_crm_contacts, expected_data: crm_contacts, operations: [move, clone] },
{ widget_name: :designs, eval_value: :work_item_designs, expected_data: designs, operations: [move, clone] },
{ widget_name: :labels, eval_value: :work_item_labels, expected_data: labels, operations: [move, clone] },
{ widget_name: :work_item_children, eval_value: :work_item_children, expected_data: child_items, operations: [move] }
]
@ -231,10 +245,8 @@ RSpec.shared_examples 'cloneable and moveable widget data' do
context "with widget" do
before do
enable_design_management
allow(original_work_item).to receive(:from_service_desk?).and_return(true)
allow(WorkItems::CopyTimelogsWorker).to receive(:perform_async) do |*args|
WorkItems::CopyTimelogsWorker.perform_inline(*args)
end
end
it_behaves_like 'for clone and move services'
@ -248,6 +260,10 @@ RSpec.shared_examples 'for clone and move services' do
new_work_item = service.execute[:work_item]
widgets.each do |widget|
# This example is being called from EE spec where we text move/clone on a group level work item(Epic).
# Designs are only available for project level work items so we will skip the spec group level work items.
next if widget[:widget_name] == :designs && original_work_item.project.blank?
widget_value = send(widget[:eval_value], new_work_item)
if widget[:operations].include?(described_class)

View File

@ -34,122 +34,240 @@ RSpec.describe 'Internal Events matchers', :clean_gitlab_redis_shared_state, fea
end
describe ':trigger_internal_events' do
it 'raises error if no events are passed to :trigger_internal_events' do
expect do
expect { nil }.to trigger_internal_events
end.to raise_error ArgumentError, 'trigger_internal_events matcher requires events argument'
context 'when testing HTML with data attributes', type: :component do
using RSpec::Parameterized::TableSyntax
let(:event_name) { 'g_edit_by_sfe' }
let(:label) { 'some_label' }
let(:rendered_html) do
<<-HTML
<div>
<a href="#" data-event-tracking="#{event_name}">Click me</a>
</div>
HTML
end
let(:capybara_node) { Capybara::Node::Simple.new(rendered_html) }
where(:html_input) do
[
ref(:rendered_html),
ref(:capybara_node)
]
end
with_them do
context 'when using positive matcher' do
it 'matches elements with correct tracking attribute' do
expect(html_input).to trigger_internal_events(event_name).on_click
end
context 'with incorrect tracking attribute' do
let(:event_name) { 'wrong_event' }
it 'does not match elements' do
expect do
expect(html_input).to trigger_internal_events('g_edit_by_sfe')
end.to raise_error(RSpec::Expectations::ExpectationNotMetError)
end
end
context 'with non existing tracking event' do
let(:event_name) { 'wrong_event' }
it 'does not match elements' do
expect do
expect(html_input).to trigger_internal_events(event_name)
end.to raise_error(ArgumentError)
end
end
context 'with additional properties' do
let(:rendered_html) do
<<-HTML
<div>
<a href="#" data-event-tracking="#{event_name}" data-event-label=\"#{label}\">Click me</a>
</div>
HTML
end
it 'matches elements' do
expect(html_input).to trigger_internal_events(event_name).with(additional_properties: { label: label })
end
end
context 'with tracking-load attribute' do
let(:rendered_html) do
<<-HTML
<div>
<a href="#" data-event-tracking="#{event_name}" data-event-tracking-load=\"true\">Click me</a>
</div>
HTML
end
it 'matches elements' do
expect(rendered_html).to trigger_internal_events(event_name).on_load
end
end
it 'raises error when multiple events are provided' do
expect do
expect(rendered_html).to trigger_internal_events(event_name, event_name)
end.to raise_error(ArgumentError, /Providing multiple event names.*is only supported for block arguments/)
end
it 'raises error when using incompatible chain methods' do
expect do
expect(rendered_html).to trigger_internal_events(event_name).once
end.to raise_error(ArgumentError, /Chain methods.*are only available for block arguments/)
end
end
context 'when using negated matcher' do
let(:rendered_html) { '<div></div>' }
it 'matches elements without tracking attribute' do
expect(rendered_html).not_to trigger_internal_events
end
it 'raises error when passing events to negated matcher' do
expect do
expect(rendered_html).not_to trigger_internal_events(event_name)
end.to raise_error(ArgumentError, /Negated trigger_internal_events matcher accepts no arguments/)
end
it 'raises error when using chain methods with negated matcher' do
expect do
expect(rendered_html).not_to trigger_internal_events(event_name)
.with(additional_properties: { label: label })
end.to raise_error(ArgumentError, /Chain methods.*are unavailable for negated.*matcher/)
end
end
end
end
it 'does not raises error if no events are passed to :not_trigger_internal_events' do
expect do
expect { nil }.to not_trigger_internal_events
end.not_to raise_error
end
it_behaves_like 'matcher and negated matcher both raise expected error',
[:trigger_internal_events, 'bad_event_name'],
"Unknown event 'bad_event_name'! trigger_internal_events matcher accepts only existing events"
it 'bubbles up failure messages' do
expect do
expect { nil }.to trigger_internal_events('g_edit_by_sfe')
end.to raise_expectation_error_with <<~TEXT
(Gitlab::InternalEvents).track_event("g_edit_by_sfe", *(any args))
expected: 1 time with arguments: ("g_edit_by_sfe", *(any args))
received: 0 times
TEXT
end
it 'bubbles up failure messages for negated matcher' do
expect do
expect { track_event }.not_to trigger_internal_events('g_edit_by_sfe')
end.to raise_expectation_error_with <<~TEXT
(Gitlab::InternalEvents).track_event("g_edit_by_sfe", {:namespace=>#<Group id:#{group_1.id} @#{group_1.name}>, :user=>#<User id:#{user_1.id} @#{user_1.username}>})
expected: 0 times with arguments: ("g_edit_by_sfe", anything)
received: 1 time with arguments: ("g_edit_by_sfe", {:namespace=>#<Group id:#{group_1.id} @#{group_1.name}>, :user=>#<User id:#{user_1.id} @#{user_1.username}>})
TEXT
end
it 'handles events that should not be triggered' do
expect { track_event }.to not_trigger_internal_events('web_ide_viewed')
end
it 'ignores extra/irrelevant triggered events' do
expect do
# web_ide_viewed event should not cause a failure when we're only testing g_edit_by_sfe
Gitlab::InternalEvents.track_event('web_ide_viewed', user: user_1, namespace: group_1)
Gitlab::InternalEvents.track_event('g_edit_by_sfe', user: user_1, namespace: group_1)
end.to trigger_internal_events('g_edit_by_sfe')
end
it 'accepts chained event counts like #receive for multiple different events' do
expect do
# #track_event and #trigger_internal_events should be order independent
Gitlab::InternalEvents.track_event('g_edit_by_sfe', user: user_1, namespace: group_1)
Gitlab::InternalEvents.track_event('g_edit_by_sfe', user: user_2, namespace: group_2)
Gitlab::InternalEvents.track_event('web_ide_viewed', user: user_2, namespace: group_2)
Gitlab::InternalEvents.track_event('web_ide_viewed', user: user_2, namespace: group_2)
Gitlab::InternalEvents.track_event('g_edit_by_sfe', user: user_1, namespace: group_1)
end.to trigger_internal_events('g_edit_by_sfe')
.with(user: user_1, namespace: group_1)
.at_least(:once)
.and trigger_internal_events('web_ide_viewed')
.with(user: user_2, namespace: group_2)
.exactly(2).times
.and trigger_internal_events('g_edit_by_sfe')
.with(user: user_2, namespace: group_2)
.once
end
context 'with additional properties' do
let(:additional_properties) { { label: 'label1', value: 123, property: 'property1' } }
let(:tracked_params) { { user: user_1, namespace: group_1, additional_properties: additional_properties } }
let(:expected_params) { tracked_params }
subject(:assertion) do
context 'with backend events' do
it 'raises error if no events are passed to :trigger_internal_events' do
expect do
Gitlab::InternalEvents.track_event('g_edit_by_sfe', **tracked_params)
expect { nil }.to trigger_internal_events
end.to raise_error ArgumentError, 'trigger_internal_events matcher requires events argument'
end
it 'does not raises error if no events are passed to :not_trigger_internal_events' do
expect do
expect { nil }.to not_trigger_internal_events
end.not_to raise_error
end
it_behaves_like 'matcher and negated matcher both raise expected error',
[:trigger_internal_events, 'bad_event_name'],
"Unknown event 'bad_event_name'! trigger_internal_events matcher accepts only existing events"
it 'bubbles up failure messages' do
expect do
expect { nil }.to trigger_internal_events('g_edit_by_sfe')
end.to raise_expectation_error_with <<~TEXT
(Gitlab::InternalEvents).track_event("g_edit_by_sfe", *(any args))
expected: 1 time with arguments: ("g_edit_by_sfe", *(any args))
received: 0 times
TEXT
end
it 'bubbles up failure messages for negated matcher' do
expect do
expect { track_event }.not_to trigger_internal_events('g_edit_by_sfe')
end.to raise_expectation_error_with <<~TEXT
(Gitlab::InternalEvents).track_event("g_edit_by_sfe", {:namespace=>#<Group id:#{group_1.id} @#{group_1.name}>, :user=>#<User id:#{user_1.id} @#{user_1.username}>})
expected: 0 times with arguments: ("g_edit_by_sfe", anything)
received: 1 time with arguments: ("g_edit_by_sfe", {:namespace=>#<Group id:#{group_1.id} @#{group_1.name}>, :user=>#<User id:#{user_1.id} @#{user_1.username}>})
TEXT
end
it 'handles events that should not be triggered' do
expect { track_event }.to not_trigger_internal_events('web_ide_viewed')
end
it 'ignores extra/irrelevant triggered events' do
expect do
# web_ide_viewed event should not cause a failure when we're only testing g_edit_by_sfe
Gitlab::InternalEvents.track_event('web_ide_viewed', user: user_1, namespace: group_1)
Gitlab::InternalEvents.track_event('g_edit_by_sfe', user: user_1, namespace: group_1)
end.to trigger_internal_events('g_edit_by_sfe')
.with(expected_params)
end
it 'accepts chained event counts like #receive for multiple different events' do
expect do
# #track_event and #trigger_internal_events should be order independent
Gitlab::InternalEvents.track_event('g_edit_by_sfe', user: user_1, namespace: group_1)
Gitlab::InternalEvents.track_event('g_edit_by_sfe', user: user_2, namespace: group_2)
Gitlab::InternalEvents.track_event('web_ide_viewed', user: user_2, namespace: group_2)
Gitlab::InternalEvents.track_event('web_ide_viewed', user: user_2, namespace: group_2)
Gitlab::InternalEvents.track_event('g_edit_by_sfe', user: user_1, namespace: group_1)
end.to trigger_internal_events('g_edit_by_sfe')
.with(user: user_1, namespace: group_1)
.at_least(:once)
.and trigger_internal_events('web_ide_viewed')
.with(user: user_2, namespace: group_2)
.exactly(2).times
.and trigger_internal_events('g_edit_by_sfe')
.with(user: user_2, namespace: group_2)
.once
end
shared_examples 'raises error for unexpected event args' do
specify do
expect { assertion }.to raise_error RSpec::Expectations::ExpectationNotMetError,
/received :event with unexpected arguments/
context 'with additional properties' do
let(:extra_track_params) { {} }
let(:additional_properties) { { label: 'label1', value: 123, property: 'property1' } }
let(:tracked_params) do
{ user: user_1, namespace: group_1, additional_properties: additional_properties.merge(extra_track_params) }
end
end
it 'accepts correct additional properties' do
assertion
end
let(:expected_params) { tracked_params }
context 'with extra attributes' do
let(:tracked_params) { super().deep_merge(additional_properties: { other_property: 'other_prop' }) }
subject(:assertion) do
expect do
Gitlab::InternalEvents.track_event('g_edit_by_sfe', **tracked_params)
end.to trigger_internal_events('g_edit_by_sfe')
.with(expected_params)
.once
end
it 'accepts correct extra attributes' do
shared_examples 'raises error for unexpected event args' do
specify do
expect { assertion }.to raise_error RSpec::Expectations::ExpectationNotMetError,
/received :event with unexpected arguments/
end
end
it 'accepts correct additional properties' do
assertion
end
end
context "with wrong label value" do
let(:expected_params) { tracked_params.deep_merge(additional_properties: { label: 'wrong_label' }) }
context 'with extra attributes' do
let(:extra_track_params) { { other_property: 'other_prop' } }
it_behaves_like 'raises error for unexpected event args'
end
it 'accepts correct extra attributes' do
assertion
end
end
context 'with extra attributes expected but not tracked' do
let(:expected_params) { tracked_params.deep_merge(additional_properties: { other_property: 'other_prop' }) }
context "with wrong label value" do
let(:expected_params) { tracked_params.deep_merge(additional_properties: { label: 'wrong_label' }) }
it_behaves_like 'raises error for unexpected event args'
end
it_behaves_like 'raises error for unexpected event args'
end
context 'with extra attributes tracked but not expected' do
let(:expected_params) { { user: user_1, namespace: group_1, additional_properties: additional_properties } }
let(:tracked_params) { expected_params.deep_merge(additional_properties: { other_property: 'other_prop' }) }
context 'with extra attributes expected but not tracked' do
let(:expected_params) { tracked_params.deep_merge(additional_properties: { other_property: 'other_prop' }) }
it_behaves_like 'raises error for unexpected event args'
it_behaves_like 'raises error for unexpected event args'
end
context 'with extra attributes tracked but not expected' do
let(:expected_params) { { user: user_1, namespace: group_1, additional_properties: additional_properties } }
let(:tracked_params) { expected_params.deep_merge(additional_properties: { other_property: 'other_prop' }) }
it_behaves_like 'raises error for unexpected event args'
end
end
end
end

View File

@ -90,7 +90,9 @@ RSpec.describe 'search/_results', feature_category: :global_search do
it 'renders the click text event tracking attributes' do
render
expect(rendered).to have_internal_tracking(event: 'click_search_result', label: scope)
expect(rendered)
.to trigger_internal_events('click_search_result').on_click
.with(additional_properties: { label: scope, value: 1 })
end
end
@ -98,7 +100,7 @@ RSpec.describe 'search/_results', feature_category: :global_search do
it 'does not render the click text event tracking attributes' do
render
expect(rendered).not_to have_internal_tracking(event: 'click_search_result', label: scope)
expect(rendered).not_to trigger_internal_events
end
end
end
@ -134,7 +136,9 @@ RSpec.describe 'search/_results', feature_category: :global_search do
it 'renders the click text event tracking attributes' do
render
expect(rendered).to have_internal_tracking(event: 'click_search_result', label: scope)
expect(rendered)
.to trigger_internal_events('click_search_result').on_click
.with(additional_properties: { label: scope, value: 1 })
end
end
@ -142,7 +146,7 @@ RSpec.describe 'search/_results', feature_category: :global_search do
it 'does not render the click text event tracking attributes' do
render
expect(rendered).not_to have_internal_tracking(event: 'click_search_result', label: scope)
expect(rendered).not_to trigger_internal_events
end
end

View File

@ -1412,7 +1412,8 @@
resolved "https://registry.yarnpkg.com/@gitlab/fonts/-/fonts-1.3.0.tgz#df89c1bb6714e4a8a5d3272568aa4de7fb337267"
integrity sha512-DoMUIN3DqjEn7wvcxBg/b7Ite5fTdF5EmuOZoBRo2j0UBGweDXmNBi+9HrTZs4cBU660dOxcf1hATFcG3npbPg==
"@gitlab/noop@^1.0.0":
"@gitlab/noop@^1.0.0", jackspeak@^2.3.5, "jackspeak@npm:@gitlab/noop@1.0.0":
name jackspeak
version "1.0.0"
resolved "https://registry.yarnpkg.com/@gitlab/noop/-/noop-1.0.0.tgz#b1ecb8ae6b2abf9b2e28927e4fbb05b7a1b2704b"
integrity sha512-nOltttik5o2BjBo8LnyeTFzHoLpMY/XcCVOC+lm9ZwU+ivEam8wafacMF0KTbRn1KVrIoHYdo70QnqS+vJiOVw==
@ -1467,6 +1468,11 @@
resolved "https://registry.yarnpkg.com/@gitlab/web-ide/-/web-ide-0.0.1-dev-20250109231656.tgz#dd8afb853ae04dae09e53b37710869975092bea7"
integrity sha512-AO6uo8fKkmlavWfqHgYNgzMz6U+ppl1uK23uiWEy3aU16xL1/MIGda4LVxzmWLVnxz3BGg0xdqA/Ipsd4VcxVw==
"@gleam-lang/highlight.js-gleam@^1.5.0":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@gleam-lang/highlight.js-gleam/-/highlight.js-gleam-1.5.0.tgz#ab9e43b88b2541a697bcf4d67711c7c4a28adcb3"
integrity sha512-rKKpXnfmHVTPuHEogMVvN4DflzKtX6kBXqu1GsVDb0uDf/bvO8Z2VnC0XWUMuKNlxa+poKIjY6geyxTaVZiMFA==
"@graphiql/react@^0.26.2":
version "0.26.2"
resolved "https://registry.yarnpkg.com/@graphiql/react/-/react-0.26.2.tgz#3a1a01a569b624de8141c53eed24a7db9a523668"
@ -9301,11 +9307,6 @@ iterall@^1.2.1:
resolved "https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea"
integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg==
jackspeak@^2.3.5, "jackspeak@npm:@gitlab/noop@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@gitlab/noop/-/noop-1.0.0.tgz#b1ecb8ae6b2abf9b2e28927e4fbb05b7a1b2704b"
integrity sha512-nOltttik5o2BjBo8LnyeTFzHoLpMY/XcCVOC+lm9ZwU+ivEam8wafacMF0KTbRn1KVrIoHYdo70QnqS+vJiOVw==
jed@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/jed/-/jed-1.1.1.tgz#7a549bbd9ffe1585b0cd0a191e203055bee574b4"