Add latest changes from gitlab-org/gitlab@master

Author: GitLab Bot
Date: 2025-04-24 15:11:42 +00:00
Parent: cc5c39a2f7
Commit: 20a9c03a16
1323 changed files with 951 additions and 187217 deletions


@ -109,26 +109,6 @@ package_hunter-bundler:
variables:
PACKAGE_MANAGER: bundler
xray_scan:
extends:
- .default-retry
- .reports:rules:x-ray
stage: lint
needs: []
image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/code-creation/repository-x-ray:rc
variables:
OUTPUT_DIR: reports
allow_failure: true
script:
- x-ray-scan -p "$CI_PROJECT_DIR" -o "$OUTPUT_DIR"
artifacts:
# this line uses xray_scan job output as source for GitLab Rails code gen feature
reports:
repository_xray: "$OUTPUT_DIR/*/*.json"
# this line saves xray_scan job output in raw form for inspection for testing purposes
paths:
- "$OUTPUT_DIR/*/*.json"
pajamas_adoption:
extends:
- .default-retry


@ -2797,18 +2797,6 @@
- <<: *if-merge-request
changes: ["Gemfile.lock", "Gemfile.next.lock"]
.reports:rules:x-ray:
rules:
- <<: *if-default-branch-refs
changes: *dependency-patterns
- <<: *if-merge-request
changes: *dependency-patterns
when: never
- <<: *if-merge-request
changes: [".gitlab/ci/reports.gitlab-ci.yml"]
when: manual
- when: never
.reports:rules:pajamas_adoption:
rules:
- <<: *if-not-ee


@ -168,7 +168,7 @@ gem 'grape-path-helpers', '~> 2.0.1', feature_category: :api
gem 'rack-cors', '~> 2.0.1', require: 'rack/cors', feature_category: :shared
# GraphQL API
gem 'graphql', '2.4.11', path: 'vendor/gems/graphql', feature_category: :api
gem 'graphql', '2.4.13', feature_category: :api
gem 'graphql-docs', '~> 5.0.0', group: [:development, :test], feature_category: :api
gem 'apollo_upload_server', '~> 2.1.6', feature_category: :api


@ -85,7 +85,7 @@
{"name":"colored2","version":"3.1.2","platform":"ruby","checksum":"b13c2bd7eeae2cf7356a62501d398e72fde78780bd26aec6a979578293c28b4a"},
{"name":"commonmarker","version":"0.23.11","platform":"ruby","checksum":"9d1d35d358740151bce29235aebfecc63314fb57dd89a83e72d4061b4fe3d2bf"},
{"name":"concurrent-ruby","version":"1.2.3","platform":"ruby","checksum":"82fdd3f8a0816e28d513e637bb2b90a45d7b982bdf4f3a0511722d2e495801e2"},
{"name":"connection_pool","version":"2.5.1","platform":"ruby","checksum":"ae802a90a4b5a081101b39d618e69921a9a50bea9ac3420a5b8c71f1befa3e9c"},
{"name":"connection_pool","version":"2.5.2","platform":"ruby","checksum":"c121d090f8217911f960c7b628bf2bf1b1444f284fd854edc188821c4f602108"},
{"name":"console","version":"1.29.2","platform":"ruby","checksum":"afd9b75a1b047059dda22df0e3c0a386e96f50f6752c87c4b00b1a9fcbe77cd6"},
{"name":"cork","version":"0.3.0","platform":"ruby","checksum":"a0a0ac50e262f8514d1abe0a14e95e71c98b24e3378690e5d044daf0013ad4bc"},
{"name":"cose","version":"1.3.0","platform":"ruby","checksum":"63247c66a5bc76e53926756574fe3724cc0a88707e358c90532ae2a320e98601"},
@ -292,6 +292,7 @@
{"name":"grape-swagger-entity","version":"0.5.5","platform":"ruby","checksum":"a2a0eb28964b1a56775a3571358a9f0a300b703dbaee1ee535adb2a7bed7ece6"},
{"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
{"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
{"name":"graphql","version":"2.4.13","platform":"ruby","checksum":"fb1db6e9e24c93c995f8083d66ec65ea70991aa2b68da1b15a360b418af5aa9d"},
{"name":"graphql-docs","version":"5.0.0","platform":"ruby","checksum":"76baca6e5a803a4b6a9fbbbfdbf16742b7c4c546c8592b6e1a7aa4e79e562d04"},
{"name":"grpc","version":"1.63.0","platform":"aarch64-linux","checksum":"dc75c5fd570b819470781d9512105dddfdd11d984f38b8e60bb946f92d1f79ee"},
{"name":"grpc","version":"1.63.0","platform":"arm64-darwin","checksum":"91b93a354508a9d1772f095554f2e4c04358c2b32d7a670e3705b7fc4695c996"},


@ -170,14 +170,6 @@ PATH
google-protobuf (~> 3)
grpc
PATH
remote: vendor/gems/graphql
specs:
graphql (2.4.11)
base64
fiber-storage
logger
PATH
remote: vendor/gems/mail-smtp_pool
specs:
@ -453,7 +445,7 @@ GEM
colored2 (3.1.2)
commonmarker (0.23.11)
concurrent-ruby (1.2.3)
connection_pool (2.5.1)
connection_pool (2.5.2)
console (1.29.2)
fiber-annotation
fiber-local (~> 1.1)
@ -948,6 +940,10 @@ GEM
grape
rack
graphlyte (1.0.0)
graphql (2.4.13)
base64
fiber-storage
logger
graphql-docs (5.0.0)
commonmarker (~> 0.23, >= 0.23.6)
escape_utils (~> 1.2)
@ -2180,7 +2176,7 @@ DEPENDENCIES
grape-swagger-entity (~> 0.5.5)
grape_logging (~> 1.8, >= 1.8.4)
graphlyte (~> 1.0.0)
graphql (= 2.4.11)!
graphql (= 2.4.13)
graphql-docs (~> 5.0.0)
grpc (= 1.63.0)
gssapi (~> 1.3.1)


@ -85,7 +85,7 @@
{"name":"colored2","version":"3.1.2","platform":"ruby","checksum":"b13c2bd7eeae2cf7356a62501d398e72fde78780bd26aec6a979578293c28b4a"},
{"name":"commonmarker","version":"0.23.11","platform":"ruby","checksum":"9d1d35d358740151bce29235aebfecc63314fb57dd89a83e72d4061b4fe3d2bf"},
{"name":"concurrent-ruby","version":"1.2.3","platform":"ruby","checksum":"82fdd3f8a0816e28d513e637bb2b90a45d7b982bdf4f3a0511722d2e495801e2"},
{"name":"connection_pool","version":"2.5.1","platform":"ruby","checksum":"ae802a90a4b5a081101b39d618e69921a9a50bea9ac3420a5b8c71f1befa3e9c"},
{"name":"connection_pool","version":"2.5.2","platform":"ruby","checksum":"c121d090f8217911f960c7b628bf2bf1b1444f284fd854edc188821c4f602108"},
{"name":"console","version":"1.29.2","platform":"ruby","checksum":"afd9b75a1b047059dda22df0e3c0a386e96f50f6752c87c4b00b1a9fcbe77cd6"},
{"name":"cork","version":"0.3.0","platform":"ruby","checksum":"a0a0ac50e262f8514d1abe0a14e95e71c98b24e3378690e5d044daf0013ad4bc"},
{"name":"cose","version":"1.3.0","platform":"ruby","checksum":"63247c66a5bc76e53926756574fe3724cc0a88707e358c90532ae2a320e98601"},
@ -292,6 +292,7 @@
{"name":"grape-swagger-entity","version":"0.5.5","platform":"ruby","checksum":"a2a0eb28964b1a56775a3571358a9f0a300b703dbaee1ee535adb2a7bed7ece6"},
{"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
{"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
{"name":"graphql","version":"2.4.13","platform":"ruby","checksum":"fb1db6e9e24c93c995f8083d66ec65ea70991aa2b68da1b15a360b418af5aa9d"},
{"name":"graphql-docs","version":"5.0.0","platform":"ruby","checksum":"76baca6e5a803a4b6a9fbbbfdbf16742b7c4c546c8592b6e1a7aa4e79e562d04"},
{"name":"grpc","version":"1.63.0","platform":"aarch64-linux","checksum":"dc75c5fd570b819470781d9512105dddfdd11d984f38b8e60bb946f92d1f79ee"},
{"name":"grpc","version":"1.63.0","platform":"arm64-darwin","checksum":"91b93a354508a9d1772f095554f2e4c04358c2b32d7a670e3705b7fc4695c996"},


@ -170,14 +170,6 @@ PATH
google-protobuf (~> 3)
grpc
PATH
remote: vendor/gems/graphql
specs:
graphql (2.4.11)
base64
fiber-storage
logger
PATH
remote: vendor/gems/mail-smtp_pool
specs:
@ -465,7 +457,7 @@ GEM
colored2 (3.1.2)
commonmarker (0.23.11)
concurrent-ruby (1.2.3)
connection_pool (2.5.1)
connection_pool (2.5.2)
console (1.29.2)
fiber-annotation
fiber-local (~> 1.1)
@ -960,6 +952,10 @@ GEM
grape
rack
graphlyte (1.0.0)
graphql (2.4.13)
base64
fiber-storage
logger
graphql-docs (5.0.0)
commonmarker (~> 0.23, >= 0.23.6)
escape_utils (~> 1.2)
@ -2214,7 +2210,7 @@ DEPENDENCIES
grape-swagger-entity (~> 0.5.5)
grape_logging (~> 1.8, >= 1.8.4)
graphlyte (~> 1.0.0)
graphql (= 2.4.11)!
graphql (= 2.4.13)
graphql-docs (~> 5.0.0)
grpc (= 1.63.0)
gssapi (~> 1.3.1)


@ -469,6 +469,7 @@ export default {
'goToFile',
'reviewFile',
'setFileCollapsedByUser',
'toggleTreeOpen',
]),
...mapActions(useFileBrowser, ['setFileBrowserVisibility']),
...mapVuexActions('findingsDrawer', ['setDrawer']),
@ -805,6 +806,7 @@ export default {
class="gl-px-5"
:total-files-count="numTotalFiles"
@clickFile="goToFile({ path: $event.path })"
@toggleFolder="toggleTreeOpen"
/>
<div class="col-12 col-md-auto diff-files-holder">
<commit-widget v-if="commit" :commit="commit" :collapsible="false" />


@ -35,6 +35,11 @@ export default {
default: undefined,
required: false,
},
groupBlobsListItems: {
type: Boolean,
required: false,
default: true,
},
},
data() {
return {
@ -149,7 +154,9 @@ export default {
:loaded-files="loadedFiles"
:total-files-count="totalFilesCount"
:row-height="rowHeight"
:group-blobs-list-items="groupBlobsListItems"
@clickFile="onFileClick"
@toggleFolder="$emit('toggleFolder', $event)"
/>
</div>
</file-browser-height>


@ -48,6 +48,11 @@ export default {
type: Number,
required: true,
},
groupBlobsListItems: {
type: Boolean,
required: false,
default: true,
},
},
data() {
return {
@ -62,7 +67,26 @@ export default {
'fileTree',
'allBlobs',
'linkedFile',
'flatBlobsList',
]),
flatUngroupedList() {
return this.flatBlobsList.reduce((acc, blob, index) => {
const loading = this.isLoading(blob.fileHash);
const lastIndex = acc.length;
const previous = acc[lastIndex - 1];
const adjacentNonHeader = previous?.isHeader ? acc[lastIndex - 2] : previous;
const isSibling = adjacentNonHeader?.parentPath === blob.parentPath;
if (isSibling) return [...acc, { ...blob, loading, level: 1 }];
const header = {
key: `header-${index}`,
path: blob.parentPath,
isHeader: true,
tree: [],
level: 0,
};
return [...acc, header, { ...blob, loading, level: 1 }];
}, []);
},
filteredTreeList() {
let search = this.search.toLowerCase().trim();
@ -101,8 +125,7 @@ export default {
const result = [];
const createFlatten = (level, hidden) => (item) => {
const isTree = item.type === 'tree';
const loading =
!isTree && !item.isHeader && this.loadedFiles && !this.loadedFiles[item.fileHash];
const loading = !isTree && !item.isHeader && this.isLoading(item.fileHash);
result.push({
...item,
hidden,
@ -152,6 +175,8 @@ export default {
];
},
treeList() {
if (!this.renderTreeList && !this.groupBlobsListItems && !this.search)
return this.flatUngroupedList;
const list = this.linkedFile ? this.flatListWithLinkedFile : this.flatFilteredTreeList;
if (this.search) return list;
return list.filter((item) => !item.hidden);
@ -166,7 +191,7 @@ export default {
},
},
methods: {
...mapActions(useLegacyDiffs, ['toggleTreeOpen', 'setRenderTreeList', 'setTreeOpen']),
...mapActions(useLegacyDiffs, ['setRenderTreeList', 'setTreeOpen']),
preventClippingSelectedFile(hash) {
// let the layout stabilize, we need to wait for:
// scroll to file, sticky elements update, file browser height update
@ -194,6 +219,9 @@ export default {
.forEach((path) => this.setTreeOpen({ path, opened: true }));
}
},
isLoading(fileHash) {
return this.loadedFiles && !this.loadedFiles[fileHash];
},
},
searchPlaceholder: sprintf(s__('MergeRequest|Search (e.g. *.vue) (%{MODIFIER_KEY}P)'), {
MODIFIER_KEY,
@ -260,7 +288,7 @@ export default {
:tabindex="item.loading ? -1 : 0"
class="gl-relative !gl-m-1"
:data-file-row="item.fileHash"
@toggleTreeOpen="toggleTreeOpen"
@toggleTreeOpen="$emit('toggleFolder', $event)"
@clickFile="!item.loading && $emit('clickFile', $event)"
/>
</template>


@ -1,4 +1,4 @@
#import "ee_else_ce/organizations/shared/graphql/fragments/group.fragment.graphql"
#import "~/organizations/shared/graphql/fragments/group.fragment.graphql"
query getMemberYourWorkGroups($search: String, $sort: String, $parentId: Int, $page: Int) {
groups(search: $search, sort: $sort, parentId: $parentId, page: $page) @client {


@ -1,6 +1,6 @@
import axios from '~/lib/utils/axios_utils';
import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
import { formatGroup } from 'ee_else_ce/groups/your_work/graphql/utils';
import { formatGroup } from '~/groups/your_work/graphql/utils';
export const resolvers = (endpoint) => ({
Query: {


@ -34,4 +34,7 @@ export const formatGroup = (group) => ({
organizationEditPath: '',
groupMembersCount: 0,
isLinkedToSubscription: false,
markedForDeletionOn: null,
isAdjournedDeletionEnabled: false,
permanentDeletionDate: null,
});


@ -142,6 +142,14 @@ const sortTreesByTypeAndName = (a, b) => {
return 0;
};
export const linkTreeNodes = (tree) => {
return tree.map((entity) =>
Object.assign(entity, {
tree: entity.tree.length ? linkTreeNodes(entity.tree) : [],
}),
);
};
export const sortTree = (sortedTree) =>
sortedTree
.map((entity) =>


@ -1,26 +0,0 @@
fragment BaseGroup on Group {
id
fullPath
fullName
parent {
id
}
webUrl
organizationEditPath
descriptionHtml
avatarUrl
descendantGroupsCount
projectsCount
groupMembersCount
visibility
createdAt
updatedAt
userPermissions {
removeGroup
viewEditPage
}
maxAccessLevel {
integerValue
}
isLinkedToSubscription
}


@ -1,5 +1,29 @@
#import "./base_group.fragment.graphql"
fragment Group on Group {
...BaseGroup
id
fullPath
fullName
parent {
id
}
webUrl
organizationEditPath
descriptionHtml
avatarUrl
descendantGroupsCount
projectsCount
groupMembersCount
visibility
createdAt
updatedAt
userPermissions {
removeGroup
viewEditPage
}
maxAccessLevel {
integerValue
}
isLinkedToSubscription
markedForDeletionOn
isAdjournedDeletionEnabled
permanentDeletionDate
}


@ -1,5 +1,5 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
#import "ee_else_ce/organizations/shared/graphql/fragments/group.fragment.graphql"
#import "~/organizations/shared/graphql/fragments/group.fragment.graphql"
query getOrganizationGroups(
$id: OrganizationsOrganizationID!


@ -288,7 +288,7 @@ export default class ActivityCalendar {
$(this.activitiesContainer)
.empty()
.append(loadingIconForLegacyJS({ size: 'lg' }));
.append(loadingIconForLegacyJS({ size: 'md', classes: 'gl-my-8' }));
$(this.recentActivitiesContainer).hide();


@ -4,12 +4,20 @@ import DiffsFileTree from '~/diffs/components/diffs_file_tree.vue';
import { useDiffsList } from '~/rapid_diffs/stores/diffs_list';
import { useFileBrowser } from '~/diffs/stores/file_browser';
import { useDiffsView } from '~/rapid_diffs/stores/diffs_view';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
export default {
name: 'FileBrowser',
components: {
DiffsFileTree,
},
props: {
groupBlobsListItems: {
type: Boolean,
required: false,
default: true,
},
},
computed: {
...mapState(useDiffsView, ['totalFilesCount']),
...mapState(useDiffsList, ['loadedFiles']),
@ -19,6 +27,9 @@ export default {
clickFile(file) {
this.$emit('clickFile', file);
},
toggleFolder(path) {
useLegacyDiffs().toggleTreeOpen(path);
},
},
};
</script>
@ -29,6 +40,8 @@ export default {
floating-resize
:loaded-files="loadedFiles"
:total-files-count="totalFilesCount"
:group-blobs-list-items="groupBlobsListItems"
@clickFile="clickFile"
@toggleFolder="toggleFolder"
/>
</template>


@ -11,6 +11,7 @@ import { createAlert } from '~/alert';
import { __ } from '~/locale';
import { fixWebComponentsStreamingOnSafari } from '~/rapid_diffs/app/safari_fix';
import { DIFF_FILE_MOUNTED } from '~/rapid_diffs/dom_events';
import { parseBoolean } from '~/lib/utils/common_utils';
// This facade interface joins together all the bits and pieces of Rapid Diffs: DiffFile, Settings, File browser, etc.
// It's a unified entrypoint for Rapid Diffs and all external communications should happen through this interface.
@ -25,7 +26,7 @@ class RapidDiffsFacade {
document.querySelector('[data-diffs-list]'),
this.DiffFileImplementation,
);
const { reloadStreamUrl, diffsStatsEndpoint, diffFilesEndpoint } =
const { reloadStreamUrl, diffsStatsEndpoint, diffFilesEndpoint, shouldSortMetadataFiles } =
document.querySelector('[data-rapid-diffs]').dataset;
useDiffsView(pinia).diffsStatsEndpoint = diffsStatsEndpoint;
useDiffsView(pinia)
@ -36,7 +37,7 @@ class RapidDiffsFacade {
error,
});
});
initFileBrowser(diffFilesEndpoint).catch((error) => {
initFileBrowser(diffFilesEndpoint, parseBoolean(shouldSortMetadataFiles)).catch((error) => {
createAlert({
message: __('Failed to load file browser. Try reloading the page.'),
error,


@ -1,20 +1,20 @@
import Vue from 'vue';
import axios from 'axios';
import store from '~/mr_notes/stores';
import { pinia } from '~/pinia/instance';
import { DiffFile } from '~/rapid_diffs/diff_file';
import FileBrowserToggle from '~/diffs/components/file_browser_toggle.vue';
import { generateTreeList } from '~/diffs/utils/tree_worker_utils';
import { SET_TREE_DATA } from '~/diffs/store/mutation_types';
import { sortTree } from '~/ide/stores/utils';
import { linkTreeNodes, sortTree } from '~/ide/stores/utils';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import FileBrowser from './file_browser.vue';
const loadFileBrowserData = async (diffFilesEndpoint) => {
const loadFileBrowserData = async (diffFilesEndpoint, shouldSort) => {
const { data } = await axios.get(diffFilesEndpoint);
const { treeEntries, tree } = generateTreeList(data.diff_files);
store.commit(`diffs/${SET_TREE_DATA}`, {
useLegacyDiffs()[SET_TREE_DATA]({
treeEntries,
tree: sortTree(tree),
tree: shouldSort ? sortTree(tree) : linkTreeNodes(tree),
});
};
@ -31,15 +31,17 @@ const initToggle = () => {
});
};
const initBrowserComponent = async () => {
const initBrowserComponent = async (shouldSort) => {
const el = document.querySelector('[data-file-browser]');
// eslint-disable-next-line no-new
new Vue({
el,
store,
pinia,
render(h) {
return h(FileBrowser, {
props: {
groupBlobsListItems: shouldSort,
},
on: {
clickFile(file) {
DiffFile.findByFileHash(file.fileHash).selectFile();
@ -50,8 +52,8 @@ const initBrowserComponent = async () => {
});
};
export async function initFileBrowser(diffFilesEndpoint) {
export async function initFileBrowser(diffFilesEndpoint, shouldSort) {
initToggle();
await loadFileBrowserData(diffFilesEndpoint);
initBrowserComponent();
await loadFileBrowserData(diffFilesEndpoint, shouldSort);
initBrowserComponent(shouldSort);
}


@ -0,0 +1,48 @@
<script>
import { GlLink, GlSprintf } from '@gitlab/ui';
import { __ } from '~/locale';
import { helpPagePath } from '~/helpers/help_page_helper';
export default {
name: 'GroupListItemDelayedDeletionModalFooter',
components: {
GlSprintf,
GlLink,
},
props: {
group: {
type: Object,
required: true,
},
},
i18n: {
groupRestoreMessage: __(
'This group can be restored until %{date}. %{linkStart}Learn more%{linkEnd}.',
),
},
computed: {
isMarkedForDeletion() {
return Boolean(this.group.markedForDeletionOn);
},
canBeMarkedForDeletion() {
return this.group.isAdjournedDeletionEnabled && !this.isMarkedForDeletion;
},
},
HELP_PAGE_PATH: helpPagePath('user/group/_index', { anchor: 'restore-a-group' }),
};
</script>
<template>
<p
v-if="canBeMarkedForDeletion"
class="gl-mb-0 gl-mt-3 gl-text-subtle"
data-testid="delayed-delete-modal-footer"
>
<gl-sprintf :message="$options.i18n.groupRestoreMessage">
<template #date>{{ group.permanentDeletionDate }}</template>
<template #link="{ content }">
<gl-link :href="$options.HELP_PAGE_PATH">{{ content }}</gl-link>
</template>
</gl-sprintf>
</p>
</template>


@ -1,10 +1,21 @@
<script>
import { GlSprintf } from '@gitlab/ui';
import { __ } from '~/locale';
import DangerConfirmModal from '~/vue_shared/components/confirm_danger/confirm_danger_modal.vue';
import GroupListItemDelayedDeletionModalFooter from '~/vue_shared/components/groups_list/group_list_item_delayed_deletion_modal_footer.vue';
export default {
name: 'GroupListItemDeleteModalCE',
name: 'GroupListItemDeleteModal',
i18n: {
immediatelyDeleteModalTitle: __('Delete group immediately?'),
immediatelyDeleteModalBody: __(
'This group is scheduled to be deleted on %{date}. You are about to delete this group, including its subgroups and projects, immediately. This action cannot be undone.',
),
},
components: {
GlSprintf,
DangerConfirmModal,
GroupListItemDelayedDeletionModalFooter,
},
props: {
visible: {
@ -25,18 +36,48 @@ export default {
required: false,
default: false,
},
group: {
type: Object,
required: true,
},
},
computed: {
isMarkedForDeletion() {
return Boolean(this.group.markedForDeletionOn);
},
groupWillBeImmediatelyDeleted() {
return !this.group.isAdjournedDeletionEnabled || this.isMarkedForDeletion;
},
deleteModalOverride() {
return this.groupWillBeImmediatelyDeleted
? this.$options.i18n.immediatelyDeleteModalTitle
: undefined;
},
},
};
</script>
<template>
<danger-confirm-modal
v-if="visible"
visible
:visible="visible"
:modal-title="deleteModalOverride"
:modal-id="modalId"
:phrase="phrase"
:confirm-loading="confirmLoading"
@confirm.prevent="$emit('confirm', $event)"
@change="$emit('change', $event)"
/>
>
<template v-if="groupWillBeImmediatelyDeleted" #modal-body>
<p>
<gl-sprintf :message="$options.i18n.immediatelyDeleteModalBody">
<template #date
><span class="gl-font-bold">{{ group.permanentDeletionDate }}</span></template
>
</gl-sprintf>
</p>
</template>
<template #modal-footer
><group-list-item-delayed-deletion-modal-footer :group="group"
/></template>
</danger-confirm-modal>
</template>


@ -0,0 +1,41 @@
<script>
import { GlBadge } from '@gitlab/ui';
import { __ } from '~/locale';
export default {
name: 'GroupListItemInactiveBadge',
i18n: {
pendingDeletion: __('Pending deletion'),
},
components: {
GlBadge,
},
props: {
group: {
type: Object,
required: true,
},
},
computed: {
isPendingDeletion() {
return Boolean(this.group.markedForDeletionOn);
},
inactiveBadge() {
if (this.isPendingDeletion) {
return {
variant: 'warning',
text: this.$options.i18n.pendingDeletion,
};
}
return null;
},
},
};
</script>
<template>
<gl-badge v-if="inactiveBadge" :variant="inactiveBadge.variant">{{
inactiveBadge.text
}}</gl-badge>
</template>


@ -3,7 +3,7 @@ import { GlIcon, GlBadge, GlTooltipDirective } from '@gitlab/ui';
import uniqueId from 'lodash/uniqueId';
import { createAlert } from '~/alert';
import GroupListItemDeleteModal from 'ee_else_ce/vue_shared/components/groups_list/group_list_item_delete_modal.vue';
import GroupListItemDeleteModal from '~/vue_shared/components/groups_list/group_list_item_delete_modal.vue';
import axios from '~/lib/utils/axios_utils';
import { VISIBILITY_TYPE_ICON, GROUP_VISIBILITY_TYPE } from '~/visibility_level/constants';
import { ACCESS_LEVEL_LABELS, ACCESS_LEVEL_NO_ACCESS_INTEGER } from '~/access_level/constants';
@ -16,11 +16,9 @@ import {
} from '~/vue_shared/components/resource_lists/constants';
import ListItem from '~/vue_shared/components/resource_lists/list_item.vue';
import ListItemStat from '~/vue_shared/components/resource_lists/list_item_stat.vue';
import {
renderDeleteSuccessToast,
deleteParams,
} from 'ee_else_ce/vue_shared/components/groups_list/utils';
import { renderDeleteSuccessToast, deleteParams } from '~/vue_shared/components/groups_list/utils';
import GroupListItemPreventDeleteModal from './group_list_item_prevent_delete_modal.vue';
import GroupListItemInactiveBadge from './group_list_item_inactive_badge.vue';
export default {
i18n: {
@ -38,8 +36,7 @@ export default {
GlBadge,
GroupListItemPreventDeleteModal,
GroupListItemDeleteModal,
GroupListItemInactiveBadge: () =>
import('ee_component/vue_shared/components/groups_list/group_list_item_inactive_badge.vue'),
GroupListItemInactiveBadge,
},
directives: {
GlTooltip: GlTooltipDirective,


@ -1,15 +1,31 @@
import toast from '~/vue_shared/plugins/global_toast';
import { sprintf, __ } from '~/locale';
export const renderDeleteSuccessToast = (group) => {
export const renderDeleteSuccessToast = (item) => {
// If delayed deletion is disabled or the project/group is already marked for deletion
if (!item.isAdjournedDeletionEnabled || item.markedForDeletionOn) {
toast(
sprintf(__("Group '%{group_name}' is being deleted."), {
group_name: item.fullName,
}),
);
return;
}
toast(
sprintf(__("Group '%{group_name}' is being deleted."), {
group_name: group.fullName,
sprintf(__("Group '%{group_name}' will be deleted on %{date}."), {
group_name: item.fullName,
date: item.permanentDeletionDate,
}),
);
};
export const deleteParams = () => {
// Overridden in EE
return {};
export const deleteParams = (item) => {
// If delayed deletion is disabled or the project/group is not yet marked for deletion
if (!item.isAdjournedDeletionEnabled || !item.markedForDeletionOn) {
return {};
}
return { permanently_remove: true };
};


@ -158,6 +158,11 @@ export default {
required: false,
default: 0,
},
eeEpicListQuery: {
type: Object,
required: false,
default: null,
},
withTabs: {
type: Boolean,
required: false,
@ -199,7 +204,9 @@ export default {
},
apollo: {
workItems: {
query: getWorkItemsQuery,
query() {
return this.isEpicsList && this.eeEpicListQuery ? this.eeEpicListQuery : getWorkItemsQuery;
},
variables() {
return this.queryVariables;
},


@ -181,7 +181,7 @@ $profile-grid-flex: 1fr;
@apply gl-bg-subtle;
}
.user-activity-content {
.user-activity-content, .user-calendar-activities {
position: relative;
.system-note-image {


@ -11,7 +11,7 @@
streamRequest: fetch('#{Gitlab::UrlSanitizer.sanitize(@stream_url)}', { signal: controller.signal })
}
.rd-app{ data: { rapid_diffs: true, reload_stream_url: @reload_stream_url, diffs_stats_endpoint: @diffs_stats_endpoint, diff_files_endpoint: @diff_files_endpoint } }
.rd-app{ data: { rapid_diffs: true, reload_stream_url: @reload_stream_url, diffs_stats_endpoint: @diffs_stats_endpoint, diff_files_endpoint: @diff_files_endpoint, should_sort_metadata_files: @should_sort_metadata_files.to_json } }
.rd-app-header
.rd-app-file-browser-toggle
%div{ data: { file_browser_toggle: true } }


@ -13,6 +13,7 @@ module RapidDiffs
update_user_endpoint:,
diffs_stats_endpoint:,
diff_files_endpoint:,
should_sort_metadata_files: false,
lazy: false
)
@diffs_slice = diffs_slice
@ -23,6 +24,7 @@ module RapidDiffs
@update_user_endpoint = update_user_endpoint
@diffs_stats_endpoint = diffs_stats_endpoint
@diff_files_endpoint = diff_files_endpoint
@should_sort_metadata_files = should_sort_metadata_files
@lazy = lazy
end


@ -15,7 +15,7 @@ module RapidDiffs
return render_404 unless diffs_resource.present?
render json: {
diff_files: DiffFileMetadataEntity.represent(diffs_resource.raw_diff_files(sorted: true))
diff_files: DiffFileMetadataEntity.represent(diffs_resource.raw_diff_files)
}
end


@ -201,7 +201,9 @@ class UsersController < ApplicationController
rescue StandardError
Date.today
end
@events = contributions_calendar.events_by_date(@calendar_date).map(&:present)
Events::RenderService.new(current_user).execute(@events)
render 'calendar_activities', layout: false
end


@ -7,7 +7,8 @@ module Groups
'type' => 'object',
'properties' => {
'group_id' => { 'type' => 'integer' },
'root_namespace_id' => { 'type' => 'integer' }
'root_namespace_id' => { 'type' => 'integer' },
'parent_namespace_id' => { 'type' => 'integer' }
},
'required' => %w[group_id root_namespace_id]
}


@ -476,7 +476,7 @@ module Types
end
def permanent_deletion_date
return unless group.adjourned_deletion_configured?
return unless group.adjourned_deletion?
permanent_deletion_date_formatted(Date.current)
end


@ -312,14 +312,14 @@ module EventsHelper
def icon_for_profile_event(event)
base_class = 'system-note-image'
classes = current_path?('users#activity') ? "#{event.action_name.parameterize}-icon gl-rounded-full gl-bg-strong gl-leading-0" : "user-avatar"
content = current_path?('users#activity') ? icon_for_event(event.action_name, size: 14) : author_avatar(event, size: 32, css_class: 'gl-inline-block', project: event.project)
classes = current_controller?('users') ? "#{event.action_name.parameterize}-icon gl-rounded-full gl-bg-strong gl-leading-0" : "user-avatar"
content = current_controller?('users') ? icon_for_event(event.action_name, size: 14) : author_avatar(event, size: 32, css_class: 'gl-inline-block', project: event.project)
tag.div(class: "#{base_class} #{classes}") { content }
end
def inline_event_icon(event)
unless current_path?('users#activity')
unless current_controller?('users')
content_tag :span, class: "system-note-image-inline gl-flex gl-mr-2 gl-mt-1 #{event.action_name.parameterize}-icon" do
next design_event_icon(event.action, size: 14) if event.design?
@ -329,7 +329,7 @@ module EventsHelper
end
def event_user_info(event)
return if current_path?('users#activity')
return if current_controller?('users')
tag.div(class: 'event-user-info') do
concat tag.span(link_to_author(event), class: 'author-name')
@ -339,7 +339,7 @@ module EventsHelper
end
def user_profile_activity_classes
current_path?('users#activity') ? ' gl-font-semibold gl-text-default' : ''
current_controller?('users') ? ' gl-font-semibold gl-text-default' : ''
end
private


@ -14,6 +14,8 @@ module DraftNotes
merge_request_activity_counter.track_publish_review_action(user: current_user)
end
todo_service.new_review(merge_request, current_user)
success
rescue ActiveRecord::RecordInvalid => e
message = "Unable to save #{e.record.class.name}: #{e.record.errors.full_messages.join(', ')} "
@ -51,7 +53,6 @@ module DraftNotes
keep_around_commits(created_notes)
draft_notes.delete_all
notification_service.async.new_review(review)
todo_service.new_review(review, current_user)
MergeRequests::ResolvedDiscussionNotificationService.new(project: project, current_user: current_user).execute(merge_request)
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
after_publish


@ -115,14 +115,13 @@ module Groups
# rubocop:enable CodeReuse/ActiveRecord
def publish_event
event = Groups::GroupDeletedEvent.new(
data: {
group_id: group.id,
root_namespace_id: group.root_ancestor&.id.to_i # remove safe navigation and `.to_i` with https://gitlab.com/gitlab-org/gitlab/-/issues/508611
}
)
event_data = {
group_id: group.id,
root_namespace_id: group.root_ancestor&.id.to_i # remove safe navigation and `.to_i` with https://gitlab.com/gitlab-org/gitlab/-/issues/508611
}
event_data[:parent_namespace_id] = group.parent_id if group.parent_id.present?
Gitlab::EventStore.publish(event)
Gitlab::EventStore.publish(Groups::GroupDeletedEvent.new(data: event_data))
end
end
end


@ -193,8 +193,8 @@ class TodoService
#
# * Mark all outstanding todos on this MR for the current user as done
#
def new_review(review, current_user)
resolve_todos_for_target(review.merge_request, current_user)
def new_review(merge_request, current_user)
resolve_todos_for_target(merge_request, current_user)
end
# When user marks a target as todo


@ -1,24 +1,31 @@
- event = event.present
- event_visible_to_user = event.visible_to_user?(current_user)
- timezone = local_assigns[:timezone] || nil
- if event.visible_to_user?(current_user)
.event-item.gl-border-b-0.gl-pb-3{ class: current_path?('users#activity') ? 'user-profile-activity !gl-pl-7' : 'project-activity-item' }
- if event_visible_to_user || @user&.include_private_contributions?
.event-item.gl-border-b-0.gl-pb-3{ class: current_controller?('users') ? 'user-profile-activity !gl-pl-7' : 'project-activity-item' }
.event-item-timestamp.gl-text-sm
#{time_ago_with_tooltip(event.created_at)}
- if event.imported?
- if timezone
%span.js-localtime{ data: { datetime: event.created_at.utc.strftime('%Y-%m-%dT%H:%M:%SZ'), toggle: 'tooltip', placement: 'top' } }
= event.created_at.to_time.in_time_zone(timezone).strftime('%-I:%M%P')
- else
= time_ago_with_tooltip(event.created_at)
- if event_visible_to_user && event.imported?
%span &middot;
= render "import/shared/imported_badge", text_only: true, importable: _('event')
- if event.wiki_page?
= render "events/event/wiki", event: event
- elsif event.design?
= render 'events/event/design', event: event
- elsif event.created_project_action?
= render "events/event/created_project", event: event
- elsif event.push_action?
= render "events/event/push", event: event
- elsif event.commented_action?
= render "events/event/note", event: event
- else
= render "events/event/common", event: event
- elsif @user&.include_private_contributions?
= render "events/event/private", event: event
- if event_visible_to_user
- if event.wiki_page?
= render "events/event/wiki", event: event
- elsif event.design?
= render 'events/event/design', event: event
- elsif event.created_project_action?
= render "events/event/created_project", event: event
- elsif event.push_action?
= render "events/event/push", event: event
- elsif event.commented_action?
= render "events/event/note", event: event
- else
= render "events/event/common", event: event
- elsif @user&.include_private_contributions?
= render "events/event/private", event: event


@ -1,11 +1,7 @@
.event-item{ class: current_path?('users#activity') ? 'user-profile-activity gl-border-b-0 !gl-pl-7 gl-pb-3' : '' }
.event-item-timestamp.gl-text-sm
= time_ago_with_tooltip(event.created_at)
.system-note-image.gl-rounded-full.gl-bg-strong.gl-leading-0= sprite_icon('eye-slash', size: 14, css_class: 'icon')
.system-note-image.gl-rounded-full.gl-bg-strong.gl-leading-0= sprite_icon('eye-slash', size: 14, css_class: 'icon')
= event_user_info(event)
= event_user_info(event)
.event-title.gl-flex
= inline_event_icon(event)
= s_('Profiles|Made a private contribution')
.event-title.gl-flex.gl-font-semibold.gl-text-default
= inline_event_icon(event)
= s_('Profiles|Made a private contribution')


@ -3,7 +3,7 @@
- @content_class = 'rd-page-container diffs-container-limited'
= render 'page'
- args = { diffs_slice: @diffs_slice, reload_stream_url: @reload_stream_url, stream_url: @stream_url, show_whitespace: @show_whitespace_default, diff_view: @diff_view, update_user_endpoint: @update_current_user_path, diffs_stats_endpoint: @diffs_stats_endpoint, diff_files_endpoint: @diff_files_endpoint }
- args = { diffs_slice: @diffs_slice, reload_stream_url: @reload_stream_url, stream_url: @stream_url, show_whitespace: @show_whitespace_default, diff_view: @diff_view, update_user_endpoint: @update_current_user_path, diffs_stats_endpoint: @diffs_stats_endpoint, diff_files_endpoint: @diff_files_endpoint, should_sort_metadata_files: true }
= render ::RapidDiffs::AppComponent.new(**args) do |c|
- c.with_diffs_list do
= render RapidDiffs::MergeRequestDiffFileComponent.with_collection(@diffs_slice, merge_request: @merge_request, parallel_view: @diff_view == :parallel)


@ -3,10 +3,10 @@
.settings-sticky-header
.settings-sticky-header-inner
%h3.gl-heading-4.gl-mb-3
= s_('AccessTokens|Require Demonstrating Proof of Possession (DPoP) headers')
= s_('AccessTokens|Use Demonstrating Proof of Possession (DPoP)')
%p.gl-text-secondary
= s_('AccessTokens|Require DPoP headers to access the REST or GraphQL API with a personal access token.')
= link_to s_('AccessTokens|How do I use DPoP headers?'), help_page_path('user/profile/personal_access_tokens.md', anchor: 'require-dpop-headers-with-personal-access-tokens'), target: '_blank', rel: 'noopener noreferrer'
= s_('AccessTokens|Use DPoP to protect your REST or GraphQL API access when using a personal access token.')
= link_to s_('AccessTokens|How do I use DPoP?'), help_page_path('user/profile/personal_access_tokens.md', anchor: 'use-dpop-with-personal-access-tokens'), target: '_blank', rel: 'noopener noreferrer'
.form-group
= f.gitlab_ui_checkbox_component :dpop_enabled, s_('AccessTokens|Enable DPoP'), checkbox_options: { checked: current_user.dpop_enabled }
= f.submit _('Save changes'), pajamas_button: true


@ -31,7 +31,7 @@
= render Pajamas::ButtonComponent.new(variant: :link, button_options: { class: 'js-retry-load' }) do
= s_('UserProfile|Retry')
.user-calendar-activities
.user-calendar-activities.gl-mb-5
.overview-content-list.user-activity-content.gl-mb-5{ data: { href: user_activity_path, testid: 'user-activity-content' } }
= gl_loading_icon(size: 'md', css_class: 'loading')


@ -2,36 +2,7 @@
= html_escape(_("Contributions for %{calendar_date}")) % { calendar_date: tag.strong(@calendar_date.to_fs(:medium)) }
- if @events.any?
%ul.bordered-list
- @events.sort_by(&:created_at).each do |event|
%li
%span.light.js-localtime{ :data => { :datetime => event.created_at.utc.strftime('%Y-%m-%dT%H:%M:%SZ'), :toggle => 'tooltip', :placement => 'top' } }
= sprite_icon('clock', css_class: 'gl-align-text-bottom')
= event.created_at.to_time.in_time_zone(local_timezone_instance(@user.timezone)).strftime('%-I:%M%P')
- if event.visible_to_user?(current_user)
- if event.push_action?
#{event.action_name} #{event.ref_type}
%strong
- commits_path = project_commits_path(event.project, event.ref_name)
= link_to_if event.project.repository.branch_exists?(event.ref_name), event.ref_name, commits_path
- else
= event_action_name(event)
%strong
- if event.note?
= link_to event.note_target.to_reference, event_note_target_url(event), class: 'has-tooltip', title: event.target_title
- elsif event.target
= link_to event.target.to_reference, Gitlab::UrlBuilder.build(event.target, only_path: true), class: 'has-tooltip', title: event.target_title
= s_('UserProfile|at')
%strong
- if event.project
= link_to_project(event.project)
- elsif event.group
= link_to_group(event.group)
- else
= event.resource_parent_name
- else
= s_('UserProfile|made a private contribution')
= render partial: 'events/event', collection: @events.sort_by(&:created_at), locals: { timezone: local_timezone_instance(@user.timezone) }
- else
%p
= _('No contributions were found')


@ -43162,6 +43162,7 @@ The status of the workflow.
| <a id="duoworkflowstatusplan_approval_required"></a>`PLAN_APPROVAL_REQUIRED` | The workflow is plan_approval_required. |
| <a id="duoworkflowstatusrunning"></a>`RUNNING` | The workflow is running. |
| <a id="duoworkflowstatusstopped"></a>`STOPPED` | The workflow is stopped. |
| <a id="duoworkflowstatustool_call_approval_required"></a>`TOOL_CALL_APPROVAL_REQUIRED` | The workflow is tool_call_approval_required. |
### `EntryType`


@ -3029,16 +3029,8 @@ In the following example, the value of `uid` attribute in the SAML response is s
## Assertion encryption (optional)
GitLab requires the use of TLS encryption with SAML 2.0. Sometimes, GitLab needs
additional assertion encryption. For example, if you:
- Terminate TLS encryption early at a load balancer.
- Include sensitive details in assertions that you do not want appearing in logs.
Most organizations should not need additional encryption at this layer.
Your IdP encrypts the assertion with the public certificate of GitLab.
GitLab decrypts the `EncryptedAssertion` with its private key.
Encrypting the SAML assertion is optional but recommended. This adds an additional layer of protection
to prevent unencrypted data being logged or intercepted by malicious actors.
{{< alert type="note" >}}
@ -3047,9 +3039,9 @@ assertion encryption and request signing.
{{< /alert >}}
The SAML integration supports `EncryptedAssertion`. To encrypt your assertions,
define the private key and the public certificate of your GitLab instance in the
SAML settings.
To encrypt your SAML assertions, define the private key and the public certificate in the GitLab
SAML settings. Your IdP encrypts the assertion with the public certificate and
GitLab decrypts the assertion with the private key.
When you define the key and certificate, replace all line feeds in the key file with `\n`.
This makes the key file one long string with no line feeds.
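For illustration only, a minimal Linux package (`gitlab.rb`) sketch might hold the pair as single-line strings with literal `\n` separators. Every value below is a placeholder, and the surrounding provider settings are assumptions rather than values taken from this page:

```ruby
# Illustrative sketch only: replace every value with your own.
gitlab_rails['omniauth_providers'] = [
  {
    name: "saml",
    args: {
      assertion_consumer_service_url: "https://gitlab.example.com/users/auth/saml/callback",
      idp_cert_fingerprint: "43:51:43:a1:b5:fc:8b:b7:0a:3a:a9:b1:0f:66:73:a8",
      idp_sso_target_url: "https://idp.example.com/sso",
      issuer: "https://gitlab.example.com",
      name_identifier_format: "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent",
      # Used by the IdP to encrypt the assertion and by GitLab to decrypt it.
      certificate: "-----BEGIN CERTIFICATE-----\nMIIC...\n-----END CERTIFICATE-----",
      private_key: "-----BEGIN PRIVATE KEY-----\nMIIE...\n-----END PRIVATE KEY-----"
    }
  }
]
```

For self-compiled installations, the same `certificate` and `private_key` keys belong in the SAML provider block of `gitlab.yml`.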


@ -446,6 +446,29 @@ occurs:
In [GitLab 17.4 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/463064), security policy
projects are excluded from push rules that enforce branch name validation.
### Security policy projects
To prevent the exposure of sensitive information that was intended to remain private in your security policy project, when you link security policy projects to other projects:
- Don't include sensitive content in your security policy projects.
- Before linking a private security policy project, review the member list of the target project to ensure all members should have access to your policy content.
- Evaluate the visibility settings of target projects.
- Use [security policy management](../../compliance/audit_event_types.md#security-policy-management) audit logs to monitor project linking.
These recommendations prevent sensitive information exposure for the following reasons:
- Shared visibility: When a private security project is linked to another project, users with access to the **Security Policies** page of the linked project can view the contents of the `.gitlab/security-policies/policy.yml` file. This includes linking a private security policy project to a public project, which can expose the policy contents to anyone who can access the public project.
- Access control: All members of the project to which a private security project is linked can view the policy file on the **Policy** page, even if they don't have access to the original private repository.
### Security and compliance controls
Project maintainers can create policies for projects that interfere with the execution of policies for groups. To limit who can modify policies for groups and ensure that compliance requirements are being met, when you implement critical security or compliance controls:
- Use custom roles to restrict who can create or modify pipeline execution policies at the project level.
- Configure protected branches for the default branch in your security policy projects to prevent direct pushes.
- Set up merge request approval rules in your security policy projects that require review from designated approvers.
- Monitor and review all policy changes in policies for both groups and projects.
## Policy management
The Policies page displays deployed policies for all available environments. You can check a


@ -384,7 +384,7 @@ Prerequisites:
You can now create personal access tokens for a service account user with no expiry date.
## Require DPoP headers with personal access tokens
## Use DPoP with personal access tokens
{{< details >}}
@ -414,14 +414,17 @@ signed DPoP header requires your corresponding private SSH key.
{{< alert type="note" >}}
If you enable this feature, all REST and GraphQL API requests without a valid DPoP header fail with a `DpopValidationError`.
If you enable this feature, all API requests without a valid DPoP header return a `DpopValidationError` error.
DPoP header is not required for Git operations over HTTPS that include an access token.
{{< /alert >}}
Prerequisites:
- You must have [added at least one public SSH key](../ssh.md#add-an-ssh-key-to-your-gitlab-account)
to your account, with the **Usage type** of **Signing**, or **Authentication & Signing**.
- You must [add at least one public SSH key](../ssh.md#add-an-ssh-key-to-your-gitlab-account)
to your account, with a **Usage type** of **Signing** or **Authentication & Signing**.
- Your SSH key type must be RSA.
- You must have installed and configured the [GitLab CLI](../../editor_extensions/gitlab_cli/_index.md)
for your GitLab account.
@ -430,14 +433,14 @@ To require DPoP on all calls to the REST and GraphQL APIs:
1. On the left sidebar, select your avatar.
1. Select **Edit profile**.
1. On the left sidebar, select **Access Tokens**.
1. Go to the **Use Demonstrating Proof of Possession** section, and select **Enable DPoP**.
1. Go to the **Use Demonstrating Proof of Possession (DPoP)** section, and select **Enable DPoP**.
1. Select **Save changes**.
1. To generate a DPoP header with the [GitLab CLI](../../editor_extensions/gitlab_cli/_index.md),
run this command in your terminal. Replace `<your_access_token>` with your access token, and `~/.ssh/id_rsa`
with the location of your private key:
```shell
bin/glab auth dpop-gen --pat "<your_access_token>" --private-key ~/.ssh/id_rsa
glab auth dpop-gen --pat "<your_access_token>" --private-key ~/.ssh/id_rsa
```
The DPoP header you generated in the CLI can be used:
@ -463,7 +466,7 @@ The DPoP header you generated in the CLI can be used:
"https://gitlab.example.com/api/graphql"
```
To learn more about DPoP headers, see the blueprint
To learn more about DPoP, see the blueprint
[Sender Constraining Personal Access Tokens](https://gitlab.com/gitlab-com/gl-security/product-security/appsec/security-feature-blueprints/-/tree/main/sender_constraining_access_tokens).
## Create a personal access token programmatically


@ -383,7 +383,9 @@ Use `**` to match zero or more directories recursively:
{{< history >}}
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180162) in GitLab 17.10.
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180162) in GitLab 17.10 [with a flag](../../../administration/feature_flags.md) named `codeowners_file_exclusions`.
- [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/517075) in GitLab 17.10.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/517309) in GitLab 17.11. Feature flag `codeowners_file_exclusions` removed.
{{< /history >}}


@ -344,6 +344,8 @@ Streaming of Code Generation responses is supported in JetBrains and Visual Stud
perceived faster response times.
Other supported IDEs will return the generated code in a single block.
Streaming is not enabled for code completion.
### Direct and indirect connections
{{< history >}}


@ -69,15 +69,15 @@ module ActiveContext
fields.each do |field|
case field
when Field::Vector
# Vector fields have fixed size based on dimensions
fixed_columns << [field, field.options[:dimensions] * 4]
when Field::Bigint
# Bigint is 8 bytes
fixed_columns << [field, 8]
when Field::Keyword, Field::Text
# Text fields are variable width
variable_columns << field
when Field::Vector
# Vector fields have fixed size based on dimensions
fixed_columns << [field, field.options[:dimensions] * 4]
else
raise ArgumentError, "Unknown field type: #{field.class}"
end
@ -89,12 +89,12 @@ module ActiveContext
def add_column_from_field(table, field)
case field
when Field::Vector
table.column(field.name, "vector(#{field.options[:dimensions]})")
when Field::Bigint
table.bigint(field.name, **field.options.except(:index))
when Field::Keyword, Field::Text
table.text(field.name, **field.options.except(:index))
when Field::Vector
table.column(field.name, "vector(#{field.options[:dimensions]})")
else
raise ArgumentError, "Unknown field type: #{field.class}"
end
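As a worked example of the sizing rules above (vector columns reserve `dimensions * 4` bytes, bigint columns 8 bytes, and keyword or text columns are variable width), the following standalone sketch with a hypothetical field list adds up the fixed-width portion of a row; it is an illustration, not part of this change:

```ruby
# Hypothetical field list; the byte sizes mirror the estimator comments above.
fields = [
  { type: :vector, dimensions: 768 }, # 768 * 4 bytes
  { type: :bigint },                  # 8 bytes
  { type: :keyword }                  # variable width, excluded from the fixed estimate
]

fixed_bytes = fields.sum do |field|
  case field[:type]
  when :vector then field[:dimensions] * 4
  when :bigint then 8
  else 0
  end
end

puts fixed_bytes # => 3080
```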


@ -3112,7 +3112,7 @@ msgstr ""
msgid "AccessTokens|Grants read-write access to repositories on private projects using Git-over-HTTP (not using the API)."
msgstr ""
msgid "AccessTokens|How do I use DPoP headers?"
msgid "AccessTokens|How do I use DPoP?"
msgstr ""
msgid "AccessTokens|IP: %{ips}"
@ -3165,12 +3165,6 @@ msgstr ""
msgid "AccessTokens|Personal access tokens"
msgstr ""
msgid "AccessTokens|Require DPoP headers to access the REST or GraphQL API with a personal access token."
msgstr ""
msgid "AccessTokens|Require Demonstrating Proof of Possession (DPoP) headers"
msgstr ""
msgid "AccessTokens|Revoke"
msgstr ""
@ -3252,6 +3246,12 @@ msgstr ""
msgid "AccessTokens|Usage"
msgstr ""
msgid "AccessTokens|Use DPoP to protect your REST or GraphQL API access when using a personal access token."
msgstr ""
msgid "AccessTokens|Use Demonstrating Proof of Possession (DPoP)"
msgstr ""
msgid "AccessTokens|View token usage information"
msgstr ""
@ -53667,9 +53667,6 @@ msgstr ""
msgid "Secrets manager"
msgstr ""
msgid "Secrets|Actions"
msgstr ""
msgid "Secrets|Add a description for the secret"
msgstr ""
@ -53697,9 +53694,6 @@ msgstr ""
msgid "Secrets|Created"
msgstr ""
msgid "Secrets|Delete"
msgstr ""
msgid "Secrets|Delete Secret"
msgstr ""
@ -53712,9 +53706,6 @@ msgstr ""
msgid "Secrets|Edit %{id}"
msgstr ""
msgid "Secrets|Edit secret"
msgstr ""
msgid "Secrets|Enable the Secrets Manager to securely store and manage sensitive information for this project."
msgstr ""
@ -53748,9 +53739,6 @@ msgstr ""
msgid "Secrets|Provisioning in progress"
msgstr ""
msgid "Secrets|Revoke"
msgstr ""
msgid "Secrets|Rotation period"
msgstr ""
@ -65880,12 +65868,6 @@ msgstr ""
msgid "UserProfile|Your projects can be available publicly, internally, or privately, at your choice."
msgstr ""
msgid "UserProfile|at"
msgstr ""
msgid "UserProfile|made a private contribution"
msgstr ""
msgid "UserProfile|updated %{updated}"
msgstr ""


@ -60,7 +60,6 @@ ee/spec/frontend/status_checks/mount_spec.js
ee/spec/frontend/usage_quotas/transfer/components/usage_by_month_spec.js
ee/spec/frontend/users/identity_verification/components/international_phone_input_spec.js
ee/spec/frontend/users/identity_verification/components/verify_phone_verification_code_spec.js
ee/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
ee/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
spec/frontend/__helpers__/vue_test_utils_helper_spec.js
spec/frontend/access_tokens/index_spec.js


@ -11,6 +11,7 @@ RSpec.describe RapidDiffs::AppComponent, type: :component, feature_category: :co
let(:update_user_endpoint) { '/update_user' }
let(:diffs_stats_endpoint) { '/diffs_stats' }
let(:diff_files_endpoint) { '/diff_files_metadata' }
let(:should_sort_metadata_files) { false }
it "renders diffs slice" do
render_component
@ -26,6 +27,16 @@ RSpec.describe RapidDiffs::AppComponent, type: :component, feature_category: :co
expect(app['data-diff-files-endpoint']).to eq(diff_files_endpoint)
end
context "with should_sort_metadata_files set to true" do
let(:should_sort_metadata_files) { true }
it "renders should_sort_metadata_files" do
render_component
app = page.find('[data-rapid-diffs]')
expect(app['data-should-sort-metadata-files']).to eq('true')
end
end
it "renders view settings" do
render_component
settings = page.find('[data-view-settings]')
@ -116,6 +127,7 @@ RSpec.describe RapidDiffs::AppComponent, type: :component, feature_category: :co
update_user_endpoint:,
diffs_stats_endpoint:,
diff_files_endpoint:,
should_sort_metadata_files:,
lazy:
)
end


@ -21,4 +21,17 @@ RSpec.describe 'User views diffs', :js, feature_category: :code_review_workflow
it 'shows the last diff file' do
expect(page).to have_selector('[data-testid="rd-diff-file"]', text: last_commit_text)
end
it 'has matching diff file order' do
skip 'MR streaming has wrong order for the diffs, remove skip once the order is correct'
browser_item_selector = '[data-testid="file-row-name-container"]:not(:has([data-testid="folder-open-icon"]))'
browser_item_titles = page.find_all(browser_item_selector).map { |element| element.text.delete("\n").strip }
# TODO: fix this selector, do not rely on classes
diff_titles = page.find_all('.rd-diff-file-title strong:first-of-type').map do |element|
element.text.delete("\n").strip
end
expect(browser_item_titles.each_with_index.all? do |browser_item_title, index|
diff_titles[index].end_with?(browser_item_title)
end).to be(true)
end
end


@ -628,6 +628,14 @@ describe('diffs/components/app', () => {
wrapper.findComponent(DiffsFileTree).vm.$emit('clickFile', file);
expect(store.goToFile).toHaveBeenCalledWith({ path: file.path });
});
it('should handle toggleFolder events', () => {
const file = { path: '111.js' };
store.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
createComponent();
wrapper.findComponent(DiffsFileTree).vm.$emit('toggleFolder', file);
expect(store.toggleTreeOpen).toHaveBeenCalledWith(file);
});
});
});


@ -55,6 +55,13 @@ describe('DiffsFileTree', () => {
expect(wrapper.emitted('clickFile')).toStrictEqual([[obj]]);
});
it('re-emits toggleFolder event', () => {
const obj = {};
createComponent();
wrapper.findComponent(TreeList).vm.$emit('toggleFolder', obj);
expect(wrapper.emitted('toggleFolder')).toStrictEqual([[obj]]);
});
it('sets current file on click', () => {
const file = { fileHash: 'foo' };
createComponent();
@ -200,6 +207,7 @@ describe('DiffsFileTree', () => {
});
it('passes down props to tree list', async () => {
const groupBlobsListItems = false;
const loadedFiles = { foo: true };
const totalFilesCount = '20';
const rowHeight = 30;
@ -208,10 +216,11 @@ describe('DiffsFileTree', () => {
return `${rowHeight}px`;
},
});
createComponent({ loadedFiles, totalFilesCount });
createComponent({ loadedFiles, totalFilesCount, groupBlobsListItems });
await nextTick();
expect(wrapper.findComponent(TreeList).props('loadedFiles')).toBe(loadedFiles);
expect(wrapper.findComponent(TreeList).props('totalFilesCount')).toBe(totalFilesCount);
expect(wrapper.findComponent(TreeList).props('rowHeight')).toBe(rowHeight);
expect(wrapper.findComponent(TreeList).props('groupBlobsListItems')).toBe(groupBlobsListItems);
});
});


@ -107,6 +107,22 @@ describe('Diffs tree list component', () => {
file_path: 'app/index.js',
file_hash: 'app-index',
},
'unordered.rb': {
addedLines: 0,
changed: true,
deleted: false,
fileHash: 'unordered',
key: 'unordered.rb',
name: 'unordered.rb',
path: 'unordered.rb',
removedLines: 0,
tempFile: true,
type: 'blob',
parentPath: '/',
tree: [],
file_path: 'unordered.rb',
file_hash: 'unordered',
},
'test.rb': {
addedLines: 0,
changed: true,
@ -143,11 +159,12 @@ describe('Diffs tree list component', () => {
useLegacyDiffs().treeEntries = treeEntries;
useLegacyDiffs().tree = [
treeEntries.LICENSE,
{
...treeEntries.app,
tree: [treeEntries.javascript, treeEntries['index.js'], treeEntries['test.rb']],
},
treeEntries['unordered.rb'],
treeEntries.LICENSE,
];
return treeEntries;
@ -192,7 +209,7 @@ describe('Diffs tree list component', () => {
${'*.js'} | ${2}
${'index.js'} | ${2}
${'app/*.js'} | ${2}
${'*.js, *.rb'} | ${3}
${'*.js, *.rb'} | ${5}
`('returns $itemSize item for $extension', async ({ extension, itemSize }) => {
const input = findDiffTreeSearch();
@ -205,7 +222,19 @@ describe('Diffs tree list component', () => {
});
it('renders tree', () => {
expect(getScroller().props('items')).toHaveLength(6);
expect(
getScroller()
.props('items')
.map((item) => item.path),
).toStrictEqual([
'app',
'app/javascript',
'app/javascript/file.rb',
'app/index.js',
'app/test.rb',
'unordered.rb',
'LICENSE',
]);
});
it('re-emits clickFile event', () => {
@ -219,17 +248,43 @@ describe('Diffs tree list component', () => {
expect(getFileRow().props('hideFileStats')).toBe(true);
});
it('calls toggleTreeOpen when clicking folder', () => {
it('re-emits toggleTreeOpen event as toggleFolder', () => {
getFileRow().vm.$emit('toggleTreeOpen', 'app');
expect(useLegacyDiffs().toggleTreeOpen).toHaveBeenCalledWith('app');
expect(wrapper.emitted('toggleFolder')).toStrictEqual([['app']]);
});
it('renders when renderTreeList is false', async () => {
useLegacyDiffs().renderTreeList = false;
describe('when renderTreeList is false', () => {
beforeEach(() => {
useLegacyDiffs().renderTreeList = false;
});
await nextTick();
expect(getScroller().props('items')).toHaveLength(5);
it('renders list items', async () => {
await nextTick();
expect(
getScroller()
.props('items')
.map((item) => item.path),
).toStrictEqual(['app', 'app/index.js', 'app/test.rb', '/', 'unordered.rb', 'LICENSE']);
});
it('renders ungrouped list items', async () => {
createComponent({ groupBlobsListItems: false });
await nextTick();
expect(
getScroller()
.props('items')
.map((item) => item.path),
).toStrictEqual([
'app',
'app/index.js',
'/',
'unordered.rb',
'app',
'app/test.rb',
'/',
'LICENSE',
]);
});
});
it('dispatches setTreeOpen with all paths for the current diff file', async () => {
@ -396,36 +451,43 @@ describe('Diffs tree list component', () => {
});
describe('loading state', () => {
const getLoadedFiles = (offset = 1) =>
useLegacyDiffs()
.tree.slice(offset)
.reduce((acc, el) => {
acc[el.fileHash] = true;
return acc;
}, {});
const getLoadingFile = () => useLegacyDiffs().tree[2];
const getRootItems = () =>
getScroller()
.props('items')
.filter((item) => item.type !== 'tree');
const findLoadingItem = (loadedFile) =>
getRootItems().find((item) => item.type !== 'tree' && item.fileHash !== loadedFile.fileHash);
const findLoadedItem = (loadedFile) =>
getRootItems().find((item) => item.type !== 'tree' && item.fileHash === loadedFile.fileHash);
beforeEach(() => {
setupFilesInState();
});
it('sets loading state for loading files', () => {
const loadedFiles = getLoadedFiles();
createComponent({ loadedFiles });
const [firstItem, secondItem] = getScroller().props('items');
expect(firstItem.loading).toBe(true);
expect(secondItem.loading).toBe(false);
const loadedFile = getLoadingFile();
createComponent({ loadedFiles: { [loadedFile.fileHash]: true } });
const loadedItem = findLoadedItem(loadedFile);
const loadingItem = findLoadingItem(loadedFile);
expect(loadingItem.loading).toBe(true);
expect(loadedItem.loading).toBe(false);
});
it('is not focusable', () => {
const loadedFiles = getLoadedFiles();
createComponent({ loadedFiles });
expect(wrapper.findAllComponents(DiffFileRow).at(0).attributes('tabindex')).toBe('-1');
const loadedFile = getLoadingFile();
createComponent({ loadedFiles: { [loadedFile.fileHash]: true } });
const loadingItemIndex = getScroller().props('items').indexOf(findLoadingItem(loadedFile));
expect(
wrapper.findAllComponents(DiffFileRow).at(loadingItemIndex).attributes('tabindex'),
).toBe('-1');
});
it('ignores clicks on loading files', () => {
const loadedFiles = getLoadedFiles();
createComponent({ loadedFiles });
wrapper.findAllComponents(DiffFileRow).at(0).vm.$emit('clickFile', {});
const loadedFile = getLoadingFile();
createComponent({ loadedFiles: { [loadedFile.fileHash]: true } });
const loadingItemIndex = getScroller().props('items').indexOf(findLoadingItem(loadedFile));
wrapper.findAllComponents(DiffFileRow).at(loadingItemIndex).vm.$emit('clickFile', {});
expect(wrapper.emitted('clickFile')).toBe(undefined);
});
});

View File

@ -38,8 +38,8 @@ const MOCK_DELETE_PARAMS = {
testParam: true,
};
jest.mock('ee_else_ce/vue_shared/components/groups_list/utils', () => ({
...jest.requireActual('ee_else_ce/vue_shared/components/groups_list/utils'),
jest.mock('~/vue_shared/components/groups_list/utils', () => ({
...jest.requireActual('~/vue_shared/components/groups_list/utils'),
renderDeleteSuccessToast: jest.fn(),
deleteParams: jest.fn(() => MOCK_DELETE_PARAMS),
}));

View File

@ -41,7 +41,13 @@ describe('Rapid Diffs App', () => {
initFileBrowser.mockResolvedValue();
setHTMLFixture(
`
<div data-rapid-diffs data-reload-stream-url="/reload" data-diffs-stats-endpoint="/stats" data-diff-files-endpoint="/diff-files-metadata">
<div
data-rapid-diffs
data-reload-stream-url="/reload"
data-diffs-stats-endpoint="/stats"
data-diff-files-endpoint="/diff-files-metadata"
data-should-sort-metadata-files="true"
>
<div id="js-stream-container" data-diffs-stream-url="/stream"></div>
</div>
`,
@ -59,7 +65,7 @@ describe('Rapid Diffs App', () => {
expect(window.customElements.define).toHaveBeenCalledWith('streaming-error', StreamingError);
expect(initHiddenFilesWarning).toHaveBeenCalled();
expect(fixWebComponentsStreamingOnSafari).toHaveBeenCalled();
expect(initFileBrowser).toHaveBeenCalledWith('/diff-files-metadata');
expect(initFileBrowser).toHaveBeenCalledWith('/diff-files-metadata', true);
});
it('streams remaining diffs', () => {
@ -82,4 +88,23 @@ describe('Rapid Diffs App', () => {
document.dispatchEvent(new CustomEvent(DIFF_FILE_MOUNTED));
expect(useDiffsList(pinia).addLoadedFile).toHaveBeenCalled();
});
it('skips sorting', () => {
setHTMLFixture(
`
<div
data-rapid-diffs
data-reload-stream-url="/reload"
data-diffs-stats-endpoint="/stats"
data-diff-files-endpoint="/diff-files-metadata"
data-should-sort-metadata-files="false"
>
<div id="js-stream-container" data-diffs-stream-url="/stream"></div>
</div>
`,
);
createApp();
app.init();
expect(initFileBrowser).toHaveBeenCalledWith('/diff-files-metadata', false);
});
});

View File

@ -8,6 +8,7 @@ import store from '~/mr_notes/stores';
import { useDiffsList } from '~/rapid_diffs/stores/diffs_list';
import { useFileBrowser } from '~/diffs/stores/file_browser';
import { useDiffsView } from '~/rapid_diffs/stores/diffs_view';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
Vue.use(PiniaVuePlugin);
@ -27,6 +28,7 @@ describe('FileBrowser', () => {
useDiffsList();
useDiffsView();
useFileBrowser();
useLegacyDiffs();
});
it('passes down props', () => {
@ -63,4 +65,11 @@ describe('FileBrowser', () => {
await wrapper.findComponent(DiffsFileTree).vm.$emit('clickFile', file);
expect(wrapper.emitted('clickFile')).toStrictEqual([[file]]);
});
it('handles toggleFolder', async () => {
const path = 'foo';
createComponent();
await wrapper.findComponent(DiffsFileTree).vm.$emit('toggleFolder', path);
expect(useLegacyDiffs().toggleTreeOpen).toHaveBeenCalledWith(path);
});
});

View File

@ -15,6 +15,7 @@ jest.mock('~/rapid_diffs/app/file_browser.vue', () => ({
return h('div', {
attrs: {
'data-file-browser-component': true,
'data-group-blobs-list-items': JSON.stringify(this.groupBlobsListItems),
},
on: {
click: () => {
@ -120,4 +121,10 @@ describe('Init file browser', () => {
await waitForPromises();
expect(document.querySelector('[data-file-browser-toggle-component]')).not.toBe(null);
});
it('disables sorting', async () => {
initFileBrowser(diffFilesEndpoint, false);
await waitForPromises();
expect(document.querySelector('[data-group-blobs-list-items="false"]')).not.toBe(null);
});
});

View File

@ -0,0 +1,62 @@
import { GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { helpPagePath } from '~/helpers/help_page_helper';
import GroupListItemDelayedDeletionModalFooter from '~/vue_shared/components/groups_list/group_list_item_delayed_deletion_modal_footer.vue';
import { groups } from 'jest/vue_shared/components/groups_list/mock_data';
describe('GroupListItemDelayedDeletionModalFooter', () => {
let wrapper;
const [group] = groups;
const MOCK_PERM_DELETION_DATE = '2024-03-31';
const HELP_PATH = helpPagePath('user/group/_index', {
anchor: 'restore-a-group',
});
const defaultProps = {
group,
};
const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMountExtended(GroupListItemDelayedDeletionModalFooter, {
propsData: { ...defaultProps, ...props },
stubs: {
GlSprintf,
},
});
};
const findDelayedDeletionModalFooter = () => wrapper.findByTestId('delayed-delete-modal-footer');
const findGlLink = () => wrapper.findComponent(GlLink);
describe.each`
isAdjournedDeletionEnabled | markedForDeletionOn | footer | link
${false} | ${null} | ${false} | ${false}
${false} | ${'2024-03-24'} | ${false} | ${false}
${true} | ${null} | ${`This group can be restored until ${MOCK_PERM_DELETION_DATE}. Learn more.`} | ${HELP_PATH}
${true} | ${'2024-03-24'} | ${false} | ${false}
`(
'when group.isAdjournedDeletionEnabled is $isAdjournedDeletionEnabled and group.markedForDeletionOn is $markedForDeletionOn',
({ isAdjournedDeletionEnabled, markedForDeletionOn, footer, link }) => {
beforeEach(() => {
createComponent({
props: {
group: {
...group,
isAdjournedDeletionEnabled,
markedForDeletionOn,
permanentDeletionDate: MOCK_PERM_DELETION_DATE,
},
},
});
});
it(`does ${footer ? 'render' : 'not render'} the delayed deletion modal footer`, () => {
expect(
findDelayedDeletionModalFooter().exists() && findDelayedDeletionModalFooter().text(),
).toBe(footer);
expect(findGlLink().exists() && findGlLink().attributes('href')).toBe(link);
});
},
);
});

View File

@ -1,61 +1,132 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import GroupListItemDeleteModal from '~/vue_shared/components/groups_list/group_list_item_delete_modal.vue';
import GroupListItemDelayedDeletionModalFooter from '~/vue_shared/components/groups_list/group_list_item_delayed_deletion_modal_footer.vue';
import DangerConfirmModal from '~/vue_shared/components/confirm_danger/confirm_danger_modal.vue';
import { groups } from 'jest/vue_shared/components/groups_list/mock_data';
describe('GroupListItemDeleteModalCE', () => {
describe('GroupListItemDeleteModal', () => {
let wrapper;
const [group] = groups;
const MOCK_PERM_DELETION_DATE = '2024-03-31';
const DELETE_MODAL_BODY_OVERRIDE = `This group is scheduled to be deleted on ${MOCK_PERM_DELETION_DATE}. You are about to delete this group, including its subgroups and projects, immediately. This action cannot be undone.`;
const DELETE_MODAL_TITLE_OVERRIDE = 'Delete group immediately?';
const DEFAULT_DELETE_MODAL_TITLE = 'Are you absolutely sure?';
const defaultProps = {
modalId: '123',
phrase: 'mock phrase',
group,
};
const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMountExtended(GroupListItemDeleteModal, {
propsData: { ...defaultProps, ...props },
stubs: {
GlSprintf,
DangerConfirmModal: stubComponent(DangerConfirmModal, {
template: '<div><slot name="modal-body"></slot><slot name="modal-footer"></slot></div>',
}),
},
});
};
const findDangerConfirmModal = () => wrapper.findComponent(DangerConfirmModal);
const findDelayedDeletionModalFooter = () =>
wrapper.findComponent(GroupListItemDelayedDeletionModalFooter);
it('renders modal footer', () => {
createComponent({ props: { visible: true } });
expect(findDelayedDeletionModalFooter().props('group')).toEqual(group);
});
describe('when visible is false', () => {
beforeEach(() => {
createComponent({ props: { visible: false } });
});
it('does not render modal', () => {
expect(findDangerConfirmModal().exists()).toBe(false);
it('does not show modal', () => {
expect(findDangerConfirmModal().props('visible')).toBe(false);
});
});
describe('when visible is true', () => {
beforeEach(() => {
createComponent({ props: { visible: true } });
});
describe('delete modal overrides', () => {
describe.each`
isAdjournedDeletionEnabled | markedForDeletionOn | modalTitle | modalBody
${false} | ${false} | ${DELETE_MODAL_TITLE_OVERRIDE} | ${DELETE_MODAL_BODY_OVERRIDE}
${true} | ${false} | ${DEFAULT_DELETE_MODAL_TITLE} | ${''}
${false} | ${'2024-03-24'} | ${DELETE_MODAL_TITLE_OVERRIDE} | ${DELETE_MODAL_BODY_OVERRIDE}
${true} | ${'2024-03-24'} | ${DELETE_MODAL_TITLE_OVERRIDE} | ${DELETE_MODAL_BODY_OVERRIDE}
`(
'when group isAdjournedDeletionEnabled is $isAdjournedDeletionEnabled and markedForDeletionOn is $markedForDeletionOn',
({ isAdjournedDeletionEnabled, markedForDeletionOn, modalTitle, modalBody }) => {
beforeEach(() => {
createComponent({
props: {
visible: true,
group: {
...group,
parent: { id: 1 },
permanentDeletionDate: MOCK_PERM_DELETION_DATE,
isAdjournedDeletionEnabled,
markedForDeletionOn,
},
},
});
});
it('does render modal', () => {
expect(findDangerConfirmModal().exists()).toBe(true);
});
it(`${
modalTitle === DELETE_MODAL_TITLE_OVERRIDE ? 'does' : 'does not'
} override deletion modal title`, () => {
expect(findDangerConfirmModal().props('modalTitle')).toBe(modalTitle);
});
describe('when confirm is emitted', () => {
it(`${modalBody ? 'does' : 'does not'} override deletion modal body`, () => {
expect(findDangerConfirmModal().text()).toBe(modalBody);
});
},
);
});
describe('events', () => {
describe('deletion modal events', () => {
beforeEach(() => {
findDangerConfirmModal().vm.$emit('confirm', {
preventDefault: jest.fn(),
createComponent({
props: {
visible: true,
group: {
...group,
parent: { id: 1 },
},
},
});
});
it('emits `confirm` event to parent', () => {
expect(wrapper.emitted('confirm')).toHaveLength(1);
});
});
describe('when confirm is emitted', () => {
beforeEach(() => {
findDangerConfirmModal().vm.$emit('confirm', {
preventDefault: jest.fn(),
});
});
describe('when change is emitted', () => {
beforeEach(() => {
findDangerConfirmModal().vm.$emit('change', false);
it('emits `confirm` event to parent', () => {
expect(wrapper.emitted('confirm')).toHaveLength(1);
});
});
it('emits `change` event to parent', () => {
expect(wrapper.emitted('change')).toMatchObject([[false]]);
describe('when change is emitted', () => {
beforeEach(() => {
findDangerConfirmModal().vm.$emit('change', false);
});
it('emits `change` event to parent', () => {
expect(wrapper.emitted('change')).toMatchObject([[false]]);
});
});
});
});

View File

@ -0,0 +1,45 @@
import { GlBadge } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import GroupListItemInactiveBadge from '~/vue_shared/components/groups_list/group_list_item_inactive_badge.vue';
import { groups } from 'jest/vue_shared/components/groups_list/mock_data';
describe('GroupListItemInactiveBadge', () => {
let wrapper;
const [group] = groups;
const defaultProps = { group };
const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMountExtended(GroupListItemInactiveBadge, {
propsData: { ...defaultProps, ...props },
});
};
const findGlBadge = () => wrapper.findComponent(GlBadge);
describe.each`
markedForDeletionOn | variant | text
${null} | ${false} | ${false}
${'2024-01-01'} | ${'warning'} | ${'Pending deletion'}
`(
'when group.markedForDeletionOn is $markedForDeletionOn',
({ markedForDeletionOn, variant, text }) => {
beforeEach(() => {
createComponent({
props: {
group: {
...group,
markedForDeletionOn,
},
},
});
});
it('renders the badge correctly', () => {
expect(findGlBadge().exists() && findGlBadge().props('variant')).toBe(variant);
expect(findGlBadge().exists() && findGlBadge().text()).toBe(text);
});
},
);
});

View File

@ -6,7 +6,8 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import GroupsListItem from '~/vue_shared/components/groups_list/groups_list_item.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import waitForPromises from 'helpers/wait_for_promises';
import GroupListItemDeleteModal from 'ee_else_ce/vue_shared/components/groups_list/group_list_item_delete_modal.vue';
import GroupListItemDeleteModal from '~/vue_shared/components/groups_list/group_list_item_delete_modal.vue';
import GroupListItemInactiveBadge from '~/vue_shared/components/groups_list/group_list_item_inactive_badge.vue';
import GroupListItemPreventDeleteModal from '~/vue_shared/components/groups_list/group_list_item_prevent_delete_modal.vue';
import {
VISIBILITY_TYPE_ICON,
@ -21,7 +22,7 @@ import {
TIMESTAMP_TYPE_CREATED_AT,
TIMESTAMP_TYPE_UPDATED_AT,
} from '~/vue_shared/components/resource_lists/constants';
import { renderDeleteSuccessToast } from 'ee_else_ce/vue_shared/components/groups_list/utils';
import { renderDeleteSuccessToast } from '~/vue_shared/components/groups_list/utils';
import { createAlert } from '~/alert';
import { groups } from './mock_data';
@ -29,8 +30,8 @@ const MOCK_DELETE_PARAMS = {
testParam: true,
};
jest.mock('ee_else_ce/vue_shared/components/groups_list/utils', () => ({
...jest.requireActual('ee_else_ce/vue_shared/components/groups_list/utils'),
jest.mock('~/vue_shared/components/groups_list/utils', () => ({
...jest.requireActual('~/vue_shared/components/groups_list/utils'),
renderDeleteSuccessToast: jest.fn(),
deleteParams: jest.fn(() => MOCK_DELETE_PARAMS),
}));
@ -66,6 +67,7 @@ describe('GroupsListItem', () => {
const findAccessLevelBadge = () => wrapper.findByTestId('user-access-role');
const findTimeAgoTooltip = () => wrapper.findComponent(TimeAgoTooltip);
const fireDeleteAction = () => findListActions().props('actions')[ACTION_DELETE].action();
const findInactiveBadge = () => wrapper.findComponent(GroupListItemInactiveBadge);
const deleteModalFireConfirmEvent = async () => {
findConfirmationModal().vm.$emit('confirm', {
preventDefault: jest.fn(),
@ -462,4 +464,10 @@ describe('GroupsListItem', () => {
expect(wrapper.findByTestId('children').exists()).toBe(true);
});
it('renders inactive badge', () => {
createComponent();
expect(findInactiveBadge().exists()).toBe(true);
});
});

View File

@ -1,30 +1,76 @@
import organizationGroupsGraphQlResponse from 'test_fixtures/graphql/organizations/groups.query.graphql.json';
import { deleteParams, renderDeleteSuccessToast } from '~/vue_shared/components/groups_list/utils';
import { formatGroups } from '~/organizations/shared/utils';
import toast from '~/vue_shared/plugins/global_toast';
jest.mock('~/vue_shared/plugins/global_toast');
const {
data: {
organization: {
groups: { nodes: groups },
},
},
} = organizationGroupsGraphQlResponse;
const MOCK_GROUP_NO_DELAY_DELETION = {
fullName: 'No Delay Group',
fullPath: 'path/to/group/1',
isAdjournedDeletionEnabled: false,
markedForDeletionOn: null,
permanentDeletionDate: null,
};
const MOCK_GROUP_WITH_DELAY_DELETION = {
fullName: 'With Delay Group',
fullPath: 'path/to/group/2',
isAdjournedDeletionEnabled: true,
markedForDeletionOn: null,
permanentDeletionDate: '2024-03-31',
};
const MOCK_GROUP_PENDING_DELETION = {
fullName: 'Pending Deletion Group',
fullPath: 'path/to/group/3',
isAdjournedDeletionEnabled: true,
markedForDeletionOn: '2024-03-24',
permanentDeletionDate: '2024-03-31',
};
describe('renderDeleteSuccessToast', () => {
const [MOCK_GROUP] = formatGroups(groups);
it('when delayed deletion is disabled, renders the delete immediately message', () => {
renderDeleteSuccessToast(MOCK_GROUP_NO_DELAY_DELETION);
it('calls toast correctly', () => {
renderDeleteSuccessToast(MOCK_GROUP);
expect(toast).toHaveBeenCalledWith(
`Group '${MOCK_GROUP_NO_DELAY_DELETION.fullName}' is being deleted.`,
);
});
expect(toast).toHaveBeenCalledWith(`Group '${MOCK_GROUP.fullName}' is being deleted.`);
it('when delayed deletion is enabled and group is not pending deletion, calls toast with pending deletion info', () => {
renderDeleteSuccessToast(MOCK_GROUP_WITH_DELAY_DELETION);
expect(toast).toHaveBeenCalledWith(
`Group '${MOCK_GROUP_WITH_DELAY_DELETION.fullName}' will be deleted on ${MOCK_GROUP_WITH_DELAY_DELETION.permanentDeletionDate}.`,
);
});
it('when delayed deletion is enabled and group is already pending deletion, renders the delete immediately message', () => {
renderDeleteSuccessToast(MOCK_GROUP_PENDING_DELETION);
expect(toast).toHaveBeenCalledWith(
`Group '${MOCK_GROUP_PENDING_DELETION.fullName}' is being deleted.`,
);
});
});
describe('deleteParams', () => {
it('returns {} always', () => {
expect(deleteParams()).toStrictEqual({});
it('when delayed deletion is disabled, returns an empty object', () => {
const res = deleteParams(MOCK_GROUP_NO_DELAY_DELETION);
expect(res).toStrictEqual({});
});
it('when delayed deletion is enabled and group is not pending deletion, returns an empty object', () => {
const res = deleteParams(MOCK_GROUP_WITH_DELAY_DELETION);
expect(res).toStrictEqual({});
});
it('when delayed deletion is enabled and group is already pending deletion, returns permanent deletion params', () => {
const res = deleteParams(MOCK_GROUP_PENDING_DELETION);
expect(res).toStrictEqual({
permanently_remove: true,
});
});
});

View File

@ -0,0 +1,5 @@
query mockQuery {
group(fullPath: "example") {
id
}
}

View File

@ -69,6 +69,7 @@ import {
groupWorkItemStateCountsQueryResponse,
workItemParentQueryResponse,
} from '../../mock_data';
import mockQuery from '../../graphql/mock_query.query.graphql';
jest.mock('~/lib/utils/scroll_utils', () => ({ scrollUp: jest.fn() }));
jest.mock('~/sentry/sentry_browser_wrapper');
@ -113,6 +114,7 @@ describeSkipVue3(skipReason, () => {
workItemsViewPreference = false,
workItemsToggleEnabled = true,
props = {},
additionalHandlers = [],
} = {}) => {
window.gon = {
...window.gon,
@ -130,6 +132,7 @@ describeSkipVue3(skipReason, () => {
[workItemParentQuery, workItemParentQueryHandler],
[setSortPreferenceMutation, sortPreferenceMutationResponse],
[workItemBulkUpdateMutation, workItemBulkUpdateHandler],
...additionalHandlers,
]),
provide: {
glFeatures: {
@ -304,6 +307,24 @@ describeSkipVue3(skipReason, () => {
}),
);
});
it('uses the eeEpicListQuery prop rather than the regular query', async () => {
const handler = jest.fn();
const mockEEQueryHandler = [mockQuery, handler];
mountComponent({
provide: {
workItemType: WORK_ITEM_TYPE_NAME_EPIC,
},
additionalHandlers: [mockEEQueryHandler],
props: {
eeEpicListQuery: mockQuery,
},
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
});
});
describe('when there is an error fetching work items', () => {

View File

@ -322,7 +322,7 @@ RSpec.describe GitlabSchema.types['Group'], feature_category: :groups_and_projec
context 'with adjourned deletion disabled' do
before do
allow_next_found_instance_of(Group) do |group|
allow(group).to receive_messages(adjourned_deletion?: false, adjourned_deletion_configured?: false)
allow(group).to receive_messages(adjourned_deletion?: false)
end
end
@ -342,7 +342,7 @@ RSpec.describe GitlabSchema.types['Group'], feature_category: :groups_and_projec
context 'with adjourned deletion enabled' do
before do
allow_next_found_instance_of(Group) do |group|
allow(group).to receive_messages(adjourned_deletion?: true, adjourned_deletion_configured?: true)
allow(group).to receive_messages(adjourned_deletion?: true)
end
end
@ -361,18 +361,5 @@ RSpec.describe GitlabSchema.types['Group'], feature_category: :groups_and_projec
.to eq(::Gitlab::CurrentSettings.deletion_adjourned_period.days.since(Date.current).strftime('%F'))
end
end
context 'with adjourned deletion enabled globally' do
before do
allow_next_found_instance_of(Group) do |group|
allow(group).to receive_messages(adjourned_deletion?: false, adjourned_deletion_configured?: true)
end
end
it 'permanent_deletion_date returns correct date', :freeze_time do
expect(group_data[:permanent_deletion_date])
.to eq(::Gitlab::CurrentSettings.deletion_adjourned_period.days.since(Date.current).strftime('%F'))
end
end
end
end

View File

@ -46,8 +46,8 @@ RSpec.describe EventsHelper, factory_default: :keep, feature_category: :user_pro
let(:users_activity_page?) { true }
before do
allow(helper).to receive(:current_path?).and_call_original
allow(helper).to receive(:current_path?).with('users#activity').and_return(users_activity_page?)
allow(helper).to receive(:current_controller?).and_call_original
allow(helper).to receive(:current_controller?).with('users').and_return(users_activity_page?)
end
context 'when on users activity page' do
@ -87,8 +87,8 @@ RSpec.describe EventsHelper, factory_default: :keep, feature_category: :user_pro
let(:users_activity_page?) { true }
before do
allow(helper).to receive(:current_path?).and_call_original
allow(helper).to receive(:current_path?).with('users#activity').and_return(users_activity_page?)
allow(helper).to receive(:current_controller?).and_call_original
allow(helper).to receive(:current_controller?).with('users').and_return(users_activity_page?)
end
subject { helper.event_user_info(event) }
@ -564,8 +564,8 @@ RSpec.describe EventsHelper, factory_default: :keep, feature_category: :user_pro
let(:users_activity_page?) { true }
before do
allow(helper).to receive(:current_path?).and_call_original
allow(helper).to receive(:current_path?).with('users#activity').and_return(users_activity_page?)
allow(helper).to receive(:current_controller?).and_call_original
allow(helper).to receive(:current_controller?).with('users').and_return(users_activity_page?)
end
context 'when on the user activity page' do

View File

@ -59,7 +59,7 @@ RSpec.describe Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer, feature_cat
it 'gracefully handles analysis errors', :aggregate_failures do
expect_next_instance_of(described_class::FIELD_USAGE_ANALYZER) do |instance|
# pretend it times out on a nested analyzer
expect(instance).to receive(:result).and_raise(Timeout::Error)
expect(instance).to receive(:result).and_raise(GraphQL::Analysis::TimeoutError)
end
results = GraphQL::Analysis::AST.analyze_query(query, [described_class], multiplex_analyzers: [])

View File

@ -10,6 +10,8 @@ RSpec.describe DraftNotes::PublishService, feature_category: :code_review_workfl
let(:commit) { project.commit(sample_commit.id) }
let(:internal) { false }
let(:executing_user) { nil }
let(:service) { described_class.new(merge_request, user) }
let(:todo_service) { instance_double(TodoService) }
let(:position) do
Gitlab::Diff::Position.new(
@ -21,8 +23,13 @@ RSpec.describe DraftNotes::PublishService, feature_category: :code_review_workfl
)
end
before do
allow(service).to receive(:todo_service).and_return(todo_service)
allow(todo_service).to receive(:new_review)
end
def publish(draft: nil)
DraftNotes::PublishService.new(merge_request, user).execute(draft: draft, executing_user: executing_user)
service.execute(draft: draft, executing_user: executing_user)
end
context 'single draft note' do
@ -143,9 +150,7 @@ RSpec.describe DraftNotes::PublishService, feature_category: :code_review_workfl
end
it 'resolves todos for the MR' do
expect_any_instance_of(TodoService) do |todo_service|
expect(todo_service).to receive(:new_review).with(kind_of(Review), user)
end
expect(todo_service).to receive(:new_review).with(merge_request, user)
publish
end
@ -255,6 +260,16 @@ RSpec.describe DraftNotes::PublishService, feature_category: :code_review_workfl
end
end
context 'with no draft notes' do
let(:merge_request) { create(:merge_request) }
it 'resolves todos for the merge request' do
expect(todo_service).to receive(:new_review).with(merge_request, user)
publish
end
end
context 'draft notes with suggestions' do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }

View File

@ -78,10 +78,16 @@ RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
it 'publishes a GroupDeletedEvent' do
expect { destroy_group(group, user, async) }
.to publish_event(Groups::GroupDeletedEvent)
.with(
group_id: group.id,
root_namespace_id: group.root_ancestor.id
)
.with(
group_id: group.id,
root_namespace_id: group.root_ancestor.id
)
.and publish_event(Groups::GroupDeletedEvent)
.with(
group_id: nested_group.id,
root_namespace_id: nested_group.root_ancestor.id,
parent_namespace_id: group.id
)
end
end
end
@ -102,6 +108,7 @@ RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
before do
# Don't run Sidekiq to verify that group and projects are not actually destroyed
Sidekiq::Testing.fake! { destroy_group(group, user, true) }
Sidekiq::Testing.fake! { destroy_group(nested_group, user, true) }
end
it 'verifies original paths and projects still exist' do

View File

@ -1226,8 +1226,7 @@ RSpec.describe TodoService, feature_category: :notifications do
second_todo = create(:todo, :pending, :review_requested, user: john_doe, project: project, target: mentioned_mr, author: author)
third_todo = create(:todo, :pending, :mentioned, user: john_doe, project: project, target: mentioned_mr, author: author)
review = Review.new(merge_request: mentioned_mr)
service.new_review(review, john_doe)
service.new_review(mentioned_mr, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done

View File

@ -510,7 +510,8 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'BulkImports::RelationExportWorker' => 6,
'Ci::Runners::ExportUsageCsvWorker' => 3,
'AppSec::ContainerScanning::ScanImageWorker' => 3,
'Ci::DestroyOldPipelinesWorker' => 0
'Ci::DestroyOldPipelinesWorker' => 0,
'AuditEvents::AuditEventStreamingWorker' => 3
}.merge(extra_retry_exceptions)
end

View File

@ -1,24 +0,0 @@
include:
- local: gems/gem.gitlab-ci.yml
inputs:
gem_name: "graphql"
gem_path_prefix: "vendor/gems/"
rspec:
extends: .default
before_script:
- apt-get update -qq
- apt-get install -qq -y cmake
- cmake --version
- cd vendor/gems/graphql
- ruby -v # Print out ruby version for debugging
- gem update --system
- bundle_version=$(grep -A 1 "BUNDLED WITH" Gemfile.lock | tail -n 1 | sed -e 's/[[:space:]]//')
- gem install bundler --version "$bundle_version" --no-document # Bundler is not installed with the image
- bundle config # Show bundler configuration
- bundle install --jobs=$(nproc) --retry=3
script:
- bundle exec rake test
parallel:
matrix:
- RUBY_VERSION: ["${RUBY_VERSION_DEFAULT}", "${RUBY_VERSION_NEXT}"]

View File

@ -1,5 +0,0 @@
--no-private
--markup=markdown
--readme=readme.md
--title='GraphQL Ruby API Documentation'
'lib/**/*.rb' - '*.md'

View File

@ -1,198 +0,0 @@
# graphql-enterprise
### Breaking Changes
### Deprecations
### New Features
### Bug Fix
# 1.5.6 (13 Dec 2024)
- ObjectCache: Add `CacheableRelation` helper for top-level ActiveRecord relations
# 1.5.5 (10 Dec 2024)
- Changesets: Add missing `ensure_loaded` call for class-based changesets
# 1.5.4 (31 Oct 2024)
- ObjectCache: Add `reauthorize_cached_objects: false`
# 1.5.3 (1 Oct 2024)
- Limiters: Add expiration to rate limit data (to reduce Redis footprint)
# 1.5.2 (6 Sept 2024)
- Limiters: Add `connection_pool:` support
# 1.5.1 (30 Aug 2024)
- ObjectCache: Add `connection_pool:` support
# 1.5.0 (26 Jul 2024)
- ObjectCache: Add Dalli backend for Memcached
# 1.4.2 (11 Jun 2024)
- ObjectCache: Add `Schema.fingerprint` hook and `context[:refresh_object_cache]`
# 1.4.1 (30 May 2024)
- ObjectCache: properly handle the case where object fingerprints are evicted but the cached result was not
# 1.4.0 (11 Apr 2024)
- ObjectCache: add support for `redis_cluster: ...` backend
# 1.3.4 (18 Mar 2024)
- ObjectCache: use new `trace_with` API for instrumentation
# 1.3.3 (30 Jan 2024)
- ObjectCache: fix compatibility with `run_graphql_field` test helper #4816
# 1.3.2 (15 Jan 2024)
### Bug Fix
- Limiters: Migrate to new `trace_with` instrumentation API, requires GraphQL-Ruby 2.0.18+
# 1.3.1 (12 June 2023)
### Bug Fix
- Add missing `require "graphql"` #4511
# 1.3.0 (29 May 2023)
### New Features
- Changesets: Add `added_in: ...` and `removed_in: ...` for inline definition changes
# 1.2.0 (10 February 2023)
### New Features
- Support the `redis-client` gem as `redis:` (requires graphql-pro 1.24.0+)
# 1.1.14 (3 November 2022)
### New Features
- Limiters: Support `dashboard_charts: false` to disable built-in instrumentation
- Limiters: Support `assign_as:` to use a different accessor method for storing limiter instances on schema classes (add a corresponding `class << self; attr_accessor ...; end` to the schema class to use it)
- Limiters: Support `context_key:` to put runtime info in a different key in query context
- Runtime Limiter: Add `window_ms:` to runtime info
# 1.1.13 (21 October 2022)
### Bug Fix
- Limiter: handle missing fields in MutationLimiter
# 1.1.12 (18 October 2022)
### New Features
- Limiters: add MutationLimiter
### Bug Fix
- ObjectCache: Update Redis calls to support redis-rb 5.0
# 1.1.11 (25 August 2022)
### Bug Fix
- ObjectCache: also update `delete` to handle more than 1000 objects in Lua
# 1.1.10 (19 August 2022)
### Bug Fix
- ObjectCache: read and write objects 1000-at-a-time to avoid overloading Lua scripts in Redis
# 1.1.9 (3 August 2022)
### New Features
- ObjectCache: Add a message to context when a type or field causes a query to be treated as "private"
### Bug Fix
- ObjectCache: skip the query analyzer when `context[:skip_object_cache]` is present
# 1.1.8 (1 August 2022)
### New Features
- ObjectCache: Add `ObjectType.cache_dependencies_for(object, context)` to customize dependencies for an object
### Bug Fix
- ObjectCache: Fix to make `context[:object_cache][:objects]` a Set
# 1.1.7 (28 July 2022)
### Bug Fix
- ObjectCache: remove needless `resolve_type` calls
# 1.1.6 (28 July 2022)
### Bug Fix
- ObjectCache: persist the type names of cached objects, pass them to `Schema.resolve_type` when validating cached responses.
# 1.1.5 (22 July 2022)
### New Features
- ObjectCache: add `cache_introspection: { ttl: ... }` for setting an expiration (in seconds) on introspection fields.
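  As a hedged illustration of where that option plugs in (the `GraphQL::Enterprise::ObjectCache` plugin name and the `redis:` option are assumptions inferred from the surrounding entries; only `cache_introspection: { ttl: ... }` comes from this entry):

  ```ruby
  class MySchema < GraphQL::Schema
    # Assumed setup -- check the graphql-enterprise docs for the exact plugin options
    use GraphQL::Enterprise::ObjectCache,
      redis: Redis.new,
      # Expire cached introspection fields after one hour (TTL is in seconds)
      cache_introspection: { ttl: 3600 }
  end
  ```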
# 1.1.4 (19 March 2022)
### Bug Fix
- ObjectCache: don't create a cache fingerprint if the query is found to be uncacheable during analysis.
# 1.1.3 (3 March 2022)
### Bug Fix
- Changesets: Return an empty set when a schema doesn't use changesets #3972
# 1.1.2 (1 March 2022)
### New Features
- Changesets: Add introspection methods `Schema.changesets` and `Changeset.changes`
# 1.1.1 (14 February 2021)
### Bug Fix
- Changesets: don't require `context.schema` for plain-Ruby calls to introspection methods #3929
# 1.1.0 (24 November 2021)
### New Features
- Changesets: Add `GraphQL::Enterprise::Changeset`
# 1.0.1 (9 November 2021)
### Bug Fix
- Object Cache: properly handle invalid queries #3703
# 1.0.0 (13 October 2021)
### New Features
- Rate limiters: first release
- Object cache: first release

File diff suppressed because it is too large

View File

@ -1,176 +0,0 @@
# graphql-relay
### Breaking Changes
### Deprecations
### New Features
### Bug Fix
## 0.12.0 (21 Jul 2016)
### Breaking Changes
- Don't cache a global node identification config #51
To migrate, assign your node identification helper to the schema:
```ruby
NodeIdentification = GraphQL::Relay::GlobalNodeIdentification.define { ... }
MySchema.node_identification = NodeIdentification
```
### New Features
- Support lazy definition blocks from graphql-ruby 0.17
- Add `startCursor` and `endCursor` to `PageInfo` #60
### Bug Fix
- Support `field:` keyword for connection helper #58
## 0.11.2 (6 Jul 2016)
### New Features
- Include description for built-in objects #55
## 0.11.1 (24 Jun 2016)
### Bug Fix
- Correctly pass parent object to Connections #53
## 0.11.0 (19 Jun 2016)
### Breaking Changes
- `BaseType.define_connection` no longer caches the result to use as the default `BaseType.connection_type`. Now, store the result of `.define_connection` in a variable and pass that variable into the schema:
```ruby
# Capture the returned type:
SomethingCustomConnectionType = SomethingType.define_connection { ... }
DifferentThingType = GraphQL::ObjectType.define do
# And pass it to the connection helper:
connection :somethings, SomethingCustomConnectionType
end
```
### New Features
- Support for custom edge types / classes #50
- Support for multiple connection classes #50
## 0.10.0 (31 May 2016)
### New Feature
- Support `graphql` 0.14.0 #47
### Bug Fix
- Use strings as argument names, not symbols #47
## 0.9.5
### New Feature
- Root `id` field may have a description #43
## 0.9.4 (29 Apr 2016)
### Bug Fix
- Fix Node interface to support GraphQL 0.13.0+
## 0.9.2 (29 Apr 2016)
### Bug Fix
- Fix Node interface when type_from_object returns nil
## 0.9.1 (6 Apr 2016)
### Bug Fix
- Respond to connection fields without any pagination arguments
- Limit by `max_page_size` even when no arguments are present
## 0.9.0 (30 Mar 2016)
### Breaking change
- Remove the `order` argument from connection fields. This isn't part of the spec and shouldn't have been there in the first place!
You can implement this behavior with a custom argument, for example:
```ruby
field :cities, CityType.connection_type do
argument :order, types.String, default_value: "name"
resolve ->(obj, args, ctx) {
obj.order(args[:order])
}
end
```
### Bug Fix
- Include the MIT license in the project's source
## 0.8.1 (22 Mar 2016)
### Bug Fix
- Accept description for Mutations
## 0.8.0 (20 Mar 2016)
### New Feature
- Accept configs for `to_global_id` and `from_global_id`
- Support `graphql` 0.12+
## 0.7.1 (29 Feb 2016)
### Bug Fix
- Limit the `count(*)` when testing next page with ActiveRecord #28
## 0.7.0 (20 Feb 2016)
### New Feature
- `max_page_size` option for connections
- Support ActiveSupport 5.0.0.beta2
## 0.6.2 (11 Feb 2016)
### Bug Fix
- Correctly cast values from connection cursors #21
- Use class _name_ instead of class _object_ when finding a connection implementation (to support Rails autoloading) #16
## 0.6.1 (14 Dec 2015)
### Bug Fix
- Stringify `id` when passed into `to_global_id`
## 0.6.0 (11 Dec 2015)
### Breaking Change
- `GlobalNodeIdentification#object_from_id(id, ctx)` now accepts context as the second argument #9
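A rough sketch of the updated hook under this change, assuming the `define`-style DSL shown in the 0.12.0 entry above (the `Post.find` lookup is purely illustrative):

```ruby
NodeIdentification = GraphQL::Relay::GlobalNodeIdentification.define do
  # Context is now passed as the second argument:
  object_from_id ->(id, ctx) {
    # Illustrative lookup -- resolve the record however your app does it
    Post.find(id)
  }
end
```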
## 0.5.1 (11 Dec 2015)
### Feature
- Allow custom UUID join string #15
### Bug Fix
- Remove implicit ActiveSupport dependency #14

File diff suppressed because it is too large

View File

@ -1 +0,0 @@
graphql-ruby.org

View File

@ -1,25 +0,0 @@
# frozen_string_literal: true
source "https://rubygems.org"
gemspec
gem 'bootsnap' # required by the Rails apps generated in tests
gem 'stackprof', platform: :ruby
gem 'pry'
gem 'pry-stack_explorer', platform: :ruby
gem 'pry-byebug'
if RUBY_VERSION >= "3.0"
gem "libev_scheduler"
gem "evt"
end
if RUBY_VERSION >= "3.1.1"
gem "async", "~>2.0"
end
# Required for running `jekyll algolia ...` (via `rake site:update_search_index`)
group :jekyll_plugins do
gem 'jekyll-algolia', '~> 1.0'
gem 'jekyll-redirect-from'
end

View File

@ -1,324 +0,0 @@
PATH
remote: .
specs:
graphql (2.4.11)
base64
fiber-storage
logger
GEM
remote: https://rubygems.org/
specs:
addressable (2.8.7)
public_suffix (>= 2.0.2, < 7.0)
algolia_html_extractor (2.6.4)
json (~> 2.0)
nokogiri (~> 1.10)
algoliasearch (1.27.5)
httpclient (~> 2.8, >= 2.8.3)
json (>= 1.5.1)
ansi (1.5.0)
ast (2.4.2)
async (2.23.0)
console (~> 1.29)
fiber-annotation
io-event (~> 1.9)
metrics (~> 0.12)
traces (~> 0.15)
base64 (0.2.0)
benchmark-ips (2.14.0)
bigdecimal (3.1.9)
binding_of_caller (1.0.1)
debug_inspector (>= 1.2.0)
bootsnap (1.18.4)
msgpack (~> 1.2)
builder (3.3.0)
byebug (11.1.3)
coderay (1.1.3)
colorator (1.1.0)
concurrent-ruby (1.3.5)
console (1.30.0)
fiber-annotation
fiber-local (~> 1.1)
json
csv (3.3.2)
debug_inspector (1.2.0)
docile (1.4.1)
em-websocket (0.5.3)
eventmachine (>= 0.12.9)
http_parser.rb (~> 0)
eventmachine (1.2.7)
evt (0.4.0)
faraday (2.12.2)
faraday-net_http (>= 2.0, < 3.5)
json
logger
faraday-net_http (3.4.0)
net-http (>= 0.5.0)
ffi (1.17.1-aarch64-linux-gnu)
ffi (1.17.1-aarch64-linux-musl)
ffi (1.17.1-arm-linux-gnu)
ffi (1.17.1-arm-linux-musl)
ffi (1.17.1-arm64-darwin)
ffi (1.17.1-x86_64-darwin)
ffi (1.17.1-x86_64-linux-gnu)
ffi (1.17.1-x86_64-linux-musl)
fiber-annotation (0.2.0)
fiber-local (1.1.0)
fiber-storage
fiber-storage (1.0.0)
filesize (0.2.0)
forwardable-extended (2.6.0)
gitlab (4.20.1)
httparty (~> 0.20)
terminal-table (>= 1.5.1)
google-protobuf (4.30.0)
bigdecimal
rake (>= 13)
google-protobuf (4.30.0-aarch64-linux)
bigdecimal
rake (>= 13)
google-protobuf (4.30.0-arm64-darwin)
bigdecimal
rake (>= 13)
google-protobuf (4.30.0-x86_64-darwin)
bigdecimal
rake (>= 13)
google-protobuf (4.30.0-x86_64-linux)
bigdecimal
rake (>= 13)
graphql-batch (0.6.0)
graphql (>= 1.12.18, < 3)
promise.rb (~> 0.7.2)
http_parser.rb (0.8.0)
httparty (0.22.0)
csv
mini_mime (>= 1.0.0)
multi_xml (>= 0.5.2)
httpclient (2.9.0)
mutex_m
i18n (1.14.7)
concurrent-ruby (~> 1.0)
imagen (0.2.0)
parser (>= 2.5, != 2.5.1.1)
io-event (1.9.0)
jekyll (4.4.1)
addressable (~> 2.4)
base64 (~> 0.2)
colorator (~> 1.0)
csv (~> 3.0)
em-websocket (~> 0.5)
i18n (~> 1.0)
jekyll-sass-converter (>= 2.0, < 4.0)
jekyll-watch (~> 2.0)
json (~> 2.6)
kramdown (~> 2.3, >= 2.3.1)
kramdown-parser-gfm (~> 1.0)
liquid (~> 4.0)
mercenary (~> 0.3, >= 0.3.6)
pathutil (~> 0.9)
rouge (>= 3.0, < 5.0)
safe_yaml (~> 1.0)
terminal-table (>= 1.8, < 4.0)
webrick (~> 1.7)
jekyll-algolia (1.7.1)
algolia_html_extractor (~> 2.6)
algoliasearch (~> 1.26)
filesize (~> 0.1)
jekyll (>= 3.6, < 5.0)
json (~> 2.0)
nokogiri (~> 1.6)
progressbar (~> 1.9)
verbal_expressions (~> 0.1.5)
jekyll-redirect-from (0.16.0)
jekyll (>= 3.3, < 5.0)
jekyll-sass-converter (2.2.0)
sassc (> 2.0.1, < 3.0)
jekyll-watch (2.2.1)
listen (~> 3.0)
json (2.10.1)
kramdown (2.5.1)
rexml (>= 3.3.9)
kramdown-parser-gfm (1.1.0)
kramdown (~> 2.0)
language_server-protocol (3.17.0.4)
libev_scheduler (0.2)
lint_roller (1.1.0)
liquid (4.0.4)
listen (3.9.0)
rb-fsevent (~> 0.10, >= 0.10.3)
rb-inotify (~> 0.9, >= 0.9.10)
logger (1.6.6)
m (1.5.1)
method_source (>= 0.6.7)
rake (>= 0.9.2.2)
memory_profiler (1.1.0)
mercenary (0.4.0)
method_source (1.1.0)
metrics (0.12.1)
mini_mime (1.1.5)
minitest (5.25.4)
minitest-focus (1.4.0)
minitest (>= 4, < 6)
minitest-reporters (1.7.1)
ansi
builder
minitest (>= 5.0)
ruby-progressbar
msgpack (1.8.0)
multi_xml (0.7.1)
bigdecimal (~> 3.1)
mutex_m (0.3.0)
net-http (0.6.0)
uri
nokogiri (1.18.3-aarch64-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.3-aarch64-linux-musl)
racc (~> 1.4)
nokogiri (1.18.3-arm-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.3-arm-linux-musl)
racc (~> 1.4)
nokogiri (1.18.3-arm64-darwin)
racc (~> 1.4)
nokogiri (1.18.3-x86_64-darwin)
racc (~> 1.4)
nokogiri (1.18.3-x86_64-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.3-x86_64-linux-musl)
racc (~> 1.4)
octokit (9.2.0)
faraday (>= 1, < 3)
sawyer (~> 0.9)
parallel (1.26.3)
parser (3.3.7.1)
ast (~> 2.4.1)
racc
pathutil (0.16.2)
forwardable-extended (~> 2.6)
progressbar (1.13.0)
promise.rb (0.7.4)
pronto (0.11.3)
gitlab (>= 4.4.0, < 5.0)
httparty (>= 0.13.7, < 1.0)
octokit (>= 4.7.0, < 10.0)
rainbow (>= 2.2, < 4.0)
rexml (>= 3.2.5, < 4.0)
rugged (>= 0.23.0, < 2.0)
thor (>= 0.20.3, < 2.0)
pronto-undercover (0.2.0)
pronto (>= 0.9, < 0.12)
undercover (~> 0.4.3)
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
pry-byebug (3.10.1)
byebug (~> 11.0)
pry (>= 0.13, < 0.15)
pry-stack_explorer (0.6.1)
binding_of_caller (~> 1.0)
pry (~> 0.13)
public_suffix (6.0.1)
racc (1.8.1)
rainbow (3.1.1)
rake (13.2.1)
rake-compiler (1.2.9)
rake
rb-fsevent (0.11.2)
rb-inotify (0.11.1)
ffi (~> 1.0)
regexp_parser (2.10.0)
rexml (3.4.1)
rouge (4.5.1)
rubocop (1.73.2)
json (~> 2.3)
language_server-protocol (~> 3.17.0.2)
lint_roller (~> 1.1.0)
parallel (~> 1.10)
parser (>= 3.3.0.2)
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 2.9.3, < 3.0)
rubocop-ast (>= 1.38.0, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 4.0)
rubocop-ast (1.38.1)
parser (>= 3.3.1.0)
ruby-progressbar (1.13.0)
rugged (1.6.5)
safe_yaml (1.0.5)
sassc (2.4.0)
ffi (~> 1.9)
sawyer (0.9.2)
addressable (>= 2.3.5)
faraday (>= 0.17.3, < 3)
simplecov (0.22.0)
docile (~> 1.1)
simplecov-html (~> 0.11)
simplecov_json_formatter (~> 0.1)
simplecov-html (0.13.1)
simplecov-lcov (0.8.0)
simplecov_json_formatter (0.1.4)
stackprof (0.2.27)
terminal-table (3.0.2)
unicode-display_width (>= 1.1.1, < 3)
thor (1.3.2)
traces (0.15.2)
undercover (0.4.7)
imagen (>= 0.1.8)
rainbow (>= 2.1, < 4.0)
rugged (>= 0.27, < 1.7)
unicode-display_width (2.6.0)
uri (1.0.3)
verbal_expressions (0.1.5)
webrick (1.9.1)
yard (0.9.37)
PLATFORMS
aarch64-linux
aarch64-linux-gnu
aarch64-linux-musl
arm-linux-gnu
arm-linux-musl
arm64-darwin
x86_64-darwin
x86_64-linux
x86_64-linux-gnu
x86_64-linux-musl
DEPENDENCIES
async (~> 2.0)
benchmark-ips
bootsnap
concurrent-ruby (~> 1.0)
evt
google-protobuf
graphql!
graphql-batch
jekyll
jekyll-algolia (~> 1.0)
jekyll-redirect-from
jekyll-sass-converter (~> 2.2)
libev_scheduler
m (~> 1.5.0)
memory_profiler
minitest
minitest-focus
minitest-reporters
mutex_m
pronto
pronto-undercover
pry
pry-byebug
pry-stack_explorer
rake
rake-compiler
rubocop
simplecov
simplecov-lcov
stackprof
undercover
webrick
yard
BUNDLED WITH
2.6.5

View File

@ -1,20 +0,0 @@
Copyright 2015 Robert Mosolgo
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,240 +0,0 @@
# frozen_string_literal: true
require "bundler/gem_helper"
Bundler::GemHelper.install_tasks
require "rake/testtask"
require_relative "guides/_tasks/site"
require_relative "lib/graphql/rake_task/validate"
require 'rake/extensiontask'
Rake::TestTask.new do |t|
t.libs << "spec" << "lib" << "graphql-c_parser/lib"
exclude_integrations = []
['mongoid', 'rails'].each do |integration|
begin
require integration
rescue LoadError
exclude_integrations << integration
end
end
t.test_files = FileList.new("spec/**/*_spec.rb") do |fl|
fl.exclude(*exclude_integrations.map { |int| "spec/integration/#{int}/**/*" })
end
  # After Ruby 2.7, there are no warnings for uninitialized ivars anymore
if RUBY_VERSION < "3"
t.warning = false
end
end
require 'rubocop/rake_task'
RuboCop::RakeTask.new
default_tasks = [:test, :rubocop]
if ENV["SYSTEM_TESTS"]
task(default: ["test:system"] + default_tasks)
else
task(default: default_tasks)
end
def assert_dependency_version(dep_name, required_version, check_script)
version = `#{check_script}`
if !version.include?(required_version)
raise <<-ERR
build_parser requires #{dep_name} version "#{required_version}", but found:
$ #{check_script}
> #{version}
To fix this issue:
- Update #{dep_name} to the required version
- Update the assertion in `Rakefile` to match the current version
ERR
end
end
namespace :bench do
def prepare_benchmark
$LOAD_PATH << "./lib" << "./spec/support"
require_relative("./benchmark/run.rb")
end
desc "Benchmark parsing"
task :parse do
prepare_benchmark
GraphQLBenchmark.run("parse")
end
desc "Benchmark lexical analysis"
task :scan do
prepare_benchmark
GraphQLBenchmark.run("scan")
end
desc "Benchmark the introspection query"
task :query do
prepare_benchmark
GraphQLBenchmark.run("query")
end
desc "Benchmark validation of several queries"
task :validate do
prepare_benchmark
GraphQLBenchmark.run("validate")
end
desc "Profile a validation"
task :validate_memory do
prepare_benchmark
GraphQLBenchmark.validate_memory
end
desc "Generate a profile of the introspection query"
task :profile do
prepare_benchmark
GraphQLBenchmark.profile
end
desc "Run benchmarks on a very large result"
task :profile_large_result do
prepare_benchmark
GraphQLBenchmark.profile_large_result
end
desc "Run benchmarks on a small result"
task :profile_small_result do
prepare_benchmark
GraphQLBenchmark.profile_small_result
end
desc "Run introspection on a small schema"
task :profile_small_introspection do
prepare_benchmark
GraphQLBenchmark.profile_small_introspection
end
desc "Dump schema to SDL"
task :profile_to_definition do
prepare_benchmark
GraphQLBenchmark.profile_to_definition
end
desc "Load schema from SDL"
task :profile_from_definition do
prepare_benchmark
GraphQLBenchmark.profile_from_definition
end
desc "Compare GraphQL-Batch and GraphQL-Dataloader"
task :profile_batch_loaders do
prepare_benchmark
GraphQLBenchmark.profile_batch_loaders
end
desc "Run benchmarks on schema creation"
task :profile_boot do
prepare_benchmark
GraphQLBenchmark.profile_boot
end
desc "Check the memory footprint of a large schema"
task :profile_schema_memory_footprint do
prepare_benchmark
GraphQLBenchmark.profile_schema_memory_footprint
end
desc "Check the depth of the stacktrace during execution"
task :profile_stack_depth do
prepare_benchmark
GraphQLBenchmark.profile_stack_depth
end
desc "Run a very big introspection query"
task :profile_large_introspection do
prepare_benchmark
GraphQLBenchmark.profile_large_introspection
end
task :profile_small_query_on_large_schema do
prepare_benchmark
GraphQLBenchmark.profile_small_query_on_large_schema
end
desc "Run analysis on a big query"
task :profile_large_analysis do
prepare_benchmark
GraphQLBenchmark.profile_large_analysis
end
desc "Run analysis on parsing"
task :profile_parse do
prepare_benchmark
GraphQLBenchmark.profile_parse
end
end
namespace :test do
desc "Run system tests for ActionCable subscriptions"
task :system do
success = Dir.chdir("spec/dummy") do
system("bundle install")
system("bundle exec bin/rails test:system")
end
success || abort
end
task js: "js:test"
end
namespace :js do
client_dir = "./javascript_client"
desc "Run the tests for javascript_client"
task :test do
success = Dir.chdir(client_dir) do
system("yarn run test")
end
success || abort
end
desc "Install JS dependencies"
task :install do
Dir.chdir(client_dir) do
system("yarn install")
end
end
desc "Compile TypeScript to JavaScript"
task :build do
Dir.chdir(client_dir) do
system("yarn tsc")
end
end
task all: [:install, :build, :test]
end
task :build_c_lexer do
assert_dependency_version("Ragel", "7.0.4", "ragel -v")
`ragel -F1 graphql-c_parser/ext/graphql_c_parser_ext/lexer.rl`
end
Rake::ExtensionTask.new("graphql_c_parser_ext") do |t|
t.ext_dir = 'graphql-c_parser/ext/graphql_c_parser_ext'
t.lib_dir = "graphql-c_parser/lib/graphql"
end
task :build_yacc_parser do
assert_dependency_version("Bison", "3.8", "yacc --version")
`yacc graphql-c_parser/ext/graphql_c_parser_ext/parser.y -o graphql-c_parser/ext/graphql_c_parser_ext/parser.c -Wyacc`
end
task :move_binary do
# For some reason my local env doesn't respect the `lib_dir` configured above
`mv graphql-c_parser/lib/*.bundle graphql-c_parser/lib/graphql`
end
desc "Build the C Extension"
task build_ext: [:build_c_lexer, :build_yacc_parser, "compile:graphql_c_parser_ext", :move_binary]

View File

@ -1,41 +0,0 @@
query AbstractFragments {
node(id: "1") {
...Frag1
}
}
fragment Frag1 on Commentable {
id
__typename
...Frag2
}
fragment Frag2 on Commentable {
id
__typename
...Frag3
}
fragment Frag3 on Commentable {
id
__typename
...Frag4
}
fragment Frag4 on Commentable {
id
__typename
...Frag5
}
fragment Frag5 on Commentable {
id
__typename
...Frag6
}
fragment Frag6 on Commentable {
comments {
body
}
}

View File

@ -1,64 +0,0 @@
query AbstractFragments {
node(id: "1") {
...Frag1
}
}
fragment Frag1 on Commentable {
id
__typename
...Frag9
...Frag2
}
fragment Frag2 on Commentable {
id
__typename
...Frag9
...Frag3
}
fragment Frag3 on Commentable {
id
__typename
...Frag9
...Frag4
}
fragment Frag4 on Commentable {
id
__typename
...Frag9
...Frag5
}
fragment Frag5 on Commentable {
id
__typename
...Frag9
...Frag6
}
fragment Frag6 on Commentable {
...Frag7
...Frag9
name
id
comments {
...Frag8
...Frag7
id
}
}
fragment Frag7 on Node {
id
}
fragment Frag8 on Comment {
body
}
fragment Frag9 on Named {
name
}

View File

@ -1,138 +0,0 @@
# frozen_string_literal: true
module BatchLoading
class GraphQLBatchSchema < GraphQL::Schema
DATA = [
{ id: "1", name: "Bulls", player_ids: ["2", "3"] },
{ id: "2", name: "Michael Jordan", team_id: "1" },
{ id: "3", name: "Scottie Pippin", team_id: "1" },
{ id: "4", name: "Braves", player_ids: ["5", "6"] },
{ id: "5", name: "Chipper Jones", team_id: "4" },
{ id: "6", name: "Tom Glavine", team_id: "4" },
]
class DataLoader < GraphQL::Batch::Loader
def initialize(column: :id)
@column = column
end
def perform(keys)
keys.each do |key|
record = DATA.find { |d| d[@column] == key }
fulfill(key, record)
end
end
end
class Team < GraphQL::Schema::Object
field :name, String, null: false
field :players, "[BatchLoading::GraphQLBatchSchema::Player]", null: false
def players
DataLoader.load_many(object[:player_ids])
end
end
class Player < GraphQL::Schema::Object
field :name, String, null: false
field :team, Team, null: false
def team
DataLoader.load(object[:team_id])
end
end
class Query < GraphQL::Schema::Object
field :team, Team do
argument :name, String
end
def team(name:)
DataLoader.for(column: :name).load(name)
end
end
query(Query)
use GraphQL::Batch
end
class GraphQLDataloaderSchema < GraphQL::Schema
class DataSource < GraphQL::Dataloader::Source
def initialize(options = {column: :id})
@column = options[:column]
end
def fetch(keys)
keys.map { |key|
d = GraphQLBatchSchema::DATA.find { |d| d[@column] == key }
# p [key, @column, d]
d
}
end
end
class Team < GraphQL::Schema::Object
field :name, String, null: false
field :players, "[BatchLoading::GraphQLDataloaderSchema::Player]", null: false
def players
dataloader.with(DataSource).load_all(object[:player_ids])
end
end
class Player < GraphQL::Schema::Object
field :name, String, null: false
field :team, Team, null: false
def team
dataloader.with(DataSource).load(object[:team_id])
end
end
class Query < GraphQL::Schema::Object
field :team, Team do
argument :name, String
end
def team(name:)
dataloader.with(DataSource, column: :name).load(name)
end
end
query(Query)
use GraphQL::Dataloader
end
class GraphQLNoBatchingSchema < GraphQL::Schema
DATA = GraphQLBatchSchema::DATA
class Team < GraphQL::Schema::Object
field :name, String, null: false
field :players, "[BatchLoading::GraphQLNoBatchingSchema::Player]", null: false
def players
object[:player_ids].map { |id| DATA.find { |d| d[:id] == id } }
end
end
class Player < GraphQL::Schema::Object
field :name, String, null: false
field :team, Team, null: false
def team
DATA.find { |d| d[:id] == object[:team_id] }
end
end
class Query < GraphQL::Schema::Object
field :team, Team do
argument :name, String
end
def team(name:)
DATA.find { |d| d[:name] == name }
end
end
query(Query)
end
end

View File

@ -1,476 +0,0 @@
query Anc_inbox_layout($first_0:Int!,$first_5:Int!,$first_6:Int!,$first_a:Int!,$first_c:Int!,$order_by_1:ReportOrderInput!,$substate_2:ReportStateEnum!,$pre_submission_review_states_3:[ReportPreSubmissionReviewStateEnum]!,$size_4:ProfilePictureSizes!,$size_9:ProfilePictureSizes!,$not_types_7:[ActivityTypes]!,$order_by_8:ActivityOrderInput!,$order_by_b:TeamOrderInput!) {
query {
id,
...FE
}
}
fragment F0 on User {
username,
_profile_picturePkPpF:profile_picture(size:$size_4),
impact,
reputation,
signal,
id
}
fragment F1 on Report {
reporter {
username,
_profile_picturePkPpF:profile_picture(size:$size_4),
impact,
reputation,
signal,
_duplicate_users2Nhzxe:duplicate_users(first:$first_5) {
pageInfo {
hasNextPage,
hasPreviousPage
},
total_count,
edges {
node {
id,
...F0
},
cursor
}
},
id
},
id
}
fragment F2 on Report {
id
}
fragment F3 on Node {
id,
__typename
}
fragment F4 on ReportActivityInterface {
automated_response,
genius_execution_id,
report {
team {
handle,
id
},
id
},
__typename,
...F3
}
fragment F5 on Team {
url,
internet_bug_bounty,
_profile_pictureihzmG:profile_picture(size:$size_9),
name,
id
}
fragment F6 on User {
username,
url,
_profile_pictureihzmG:profile_picture(size:$size_9),
id
}
fragment F7 on ActivitiesBugDuplicate {
original_report_id,
id
}
fragment F8 on ActivitiesReferenceIdAdded {
reference,
reference_url,
id
}
fragment F9 on ActivitiesCveIdAdded {
cve_ids,
id
}
fragment Fa on ActivitiesAgreedOnGoingPublic {
first_to_agree,
id
}
fragment Fb on ActivitiesBugCloned {
original_report_id,
id
}
fragment Fc on ActivitiesUserAssignedToBug {
assigned_user {
url,
username,
id
},
id
}
fragment Fd on ActivitiesGroupAssignedToBug {
assigned_group {
name,
id
},
id
}
fragment Fe on ActivitiesExternalUserInvited {
email,
id
}
fragment Ff on ActivitiesExternalUserInvitationCancelled {
email,
id
}
fragment Fg on ActivitiesExternalUserRemoved {
removed_user {
id
},
id
}
fragment Fh on ActivitiesUserBannedFromProgram {
removed_user {
id
},
id
}
fragment Fi on ActivitiesBountyAwarded {
bounty_amount,
bounty_currency,
bonus_amount,
report {
reporter {
username,
url,
id
},
id
},
id
}
fragment Fj on ActivitiesBountySuggested {
bounty_amount,
bounty_currency,
bonus_amount,
id
}
fragment Fk on ActivitiesBugResolved {
report {
reporter {
username,
url,
id
},
id
},
id
}
fragment Fl on ActivitiesSwagAwarded {
report {
reporter {
username,
url,
id
},
id
},
id
}
fragment Fm on ActivitiesChangedScope {
old_scope {
asset_identifier,
id
},
new_scope {
asset_identifier,
id
},
id
}
fragment Fn on ActivityInterface {
_id,
internal,
i_can_edit,
__typename,
message,
markdown_message,
created_at,
updated_at,
actor {
__typename,
...F5,
...F6,
...F3
},
attachments {
_id,
file_name,
content_type,
expiring_url,
id
},
...F7,
...F8,
...F9,
...Fa,
...Fb,
...Fc,
...Fd,
...Fe,
...Ff,
...Fg,
...Fh,
...Fi,
...Fj,
...Fk,
...Fl,
...Fm,
...F3
}
fragment Fo on User {
username,
url,
__typename,
id
}
fragment Fp on TeamMemberGroup {
name,
__typename,
id
}
fragment Fq on Report {
_id,
url,
title,
state,
substate,
created_at,
assignee {
__typename,
...Fo,
...Fp,
...F3
},
cloned_from {
_id,
id
},
reporter {
username,
url,
id
},
team {
_id,
url,
handle,
name,
twitter_handle,
website,
about,
offers_bounties,
id
},
id
}
fragment Fr on Report {
state,
stage,
disclosed_at,
cve_ids,
singular_disclosure_disabled,
disclosed_at,
bug_reporter_agreed_on_going_public_at,
team_member_agreed_on_going_public_at,
comments_closed,
mediation_requested_at,
vulnerability_information,
vulnerability_information_html,
reporter {
disabled,
username,
url,
_profile_picture2g6hJa:profile_picture(size:$size_4),
id
},
weakness {
id,
name
},
original_report {
id,
url
},
attachments {
_id,
file_name,
expiring_url,
content_type,
id
},
allow_singular_disclosure_at,
allow_singular_disclosure_after,
singular_disclosure_allowed,
severity {
rating,
score,
author_type,
id
},
structured_scope {
_id,
asset_type,
asset_identifier,
max_severity,
id
},
_activities4z6spP:activities(first:$first_6,not_types:$not_types_7,order_by:$order_by_8) {
edges {
node {
__typename,
...F4,
...Fn,
...F3
},
cursor
},
pageInfo {
hasNextPage,
hasPreviousPage
}
},
id,
...Fq
}
fragment Fs on Report {
id,
...Fr
}
fragment Ft on Report {
title,
id
}
fragment Fu on Report {
_id,
pre_submission_review_state,
i_can_anc_review,
reporter {
username,
id
},
team {
handle,
id
},
id,
...F2
}
fragment Fv on Report {
team {
policy_html,
id
},
structured_scope {
asset_identifier,
asset_type,
instruction,
id
},
id
}
fragment Fw on Report {
weakness {
name,
id
},
id
}
fragment Fx on Report {
severity {
rating,
score,
id
},
id
}
fragment Fy on Report {
latest_activity_at,
created_at,
id,
...Fq
}
fragment Fz on Query {
me {
username,
_teamsWbVmT:teams(order_by:$order_by_b,first:$first_c) {
edges {
node {
name,
handle,
id
},
cursor
},
pageInfo {
hasNextPage,
hasPreviousPage
}
},
id
},
id
}
fragment FA on Query {
_reports1t04lE:reports(page:$first_0,first:$first_a,limit:$first_a,order_by:$order_by_1,substate:$substate_2,pre_submission_review_states:$pre_submission_review_states_3) {
total_count,
edges {
node {
_id,
id,
...Fy
},
cursor
},
pageInfo {
hasNextPage,
hasPreviousPage
}
},
id,
...Fz
}
fragment FB on Query {
id,
...Fz
}
fragment FC on Query {
id
}
fragment FD on Query {
me {
username,
_profile_pictureihzmG:profile_picture(size:$size_9),
id
},
id,
...FC
}
fragment FE on Query {
_reports3QQXft:reports(first:$first_0,order_by:$order_by_1,substate:$substate_2,pre_submission_review_states:$pre_submission_review_states_3) {
edges {
node {
id,
...F1,
...F2,
...Fs,
...Ft,
...Fu,
...Fv,
...Fw,
...Fx
},
cursor
},
pageInfo {
hasNextPage,
hasPreviousPage
}
},
id,
...FA,
...FB,
...FD
}

File diff suppressed because it is too large

View File

@ -1,700 +0,0 @@
# frozen_string_literal: true
require "graphql"
ADD_WARDEN = false
require "jazz"
require "benchmark/ips"
require "stackprof"
require "memory_profiler"
require "graphql/batch"
require "securerandom"
module GraphQLBenchmark
QUERY_STRING = GraphQL::Introspection::INTROSPECTION_QUERY
DOCUMENT = GraphQL.parse(QUERY_STRING)
SCHEMA = Jazz::Schema
BENCHMARK_PATH = File.expand_path("../", __FILE__)
CARD_SCHEMA = GraphQL::Schema.from_definition(File.read(File.join(BENCHMARK_PATH, "schema.graphql")))
ABSTRACT_FRAGMENTS = GraphQL.parse(File.read(File.join(BENCHMARK_PATH, "abstract_fragments.graphql")))
ABSTRACT_FRAGMENTS_2_QUERY_STRING = File.read(File.join(BENCHMARK_PATH, "abstract_fragments_2.graphql"))
ABSTRACT_FRAGMENTS_2 = GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING)
BIG_SCHEMA = GraphQL::Schema.from_definition(File.join(BENCHMARK_PATH, "big_schema.graphql"))
BIG_QUERY_STRING = File.read(File.join(BENCHMARK_PATH, "big_query.graphql"))
BIG_QUERY = GraphQL.parse(BIG_QUERY_STRING)
FIELDS_WILL_MERGE_SCHEMA = GraphQL::Schema.from_definition("type Query { hello: String }")
FIELDS_WILL_MERGE_QUERY = GraphQL.parse("{ #{Array.new(5000, "hello").join(" ")} }")
module_function
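# Run a Benchmark.ips comparison for the given task: "query", "validate", "scan" or "parse".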
def self.run(task)
Benchmark.ips do |x|
case task
when "query"
x.report("query") { SCHEMA.execute(document: DOCUMENT) }
when "validate"
x.report("validate - introspection ") { CARD_SCHEMA.validate(DOCUMENT) }
x.report("validate - abstract fragments") { CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS) }
x.report("validate - abstract fragments 2") { CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS_2) }
x.report("validate - big query") { BIG_SCHEMA.validate(BIG_QUERY) }
x.report("validate - fields will merge") { FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY) }
when "scan"
require "graphql/c_parser"
x.report("scan c - introspection") { GraphQL.scan_with_c(QUERY_STRING) }
x.report("scan - introspection") { GraphQL.scan_with_ruby(QUERY_STRING) }
x.report("scan c - fragments") { GraphQL.scan_with_c(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
x.report("scan - fragments") { GraphQL.scan_with_ruby(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
x.report("scan c - big query") { GraphQL.scan_with_c(BIG_QUERY_STRING) }
x.report("scan - big query") { GraphQL.scan_with_ruby(BIG_QUERY_STRING) }
when "parse"
# Uncomment this to use the C parser:
# require "graphql/c_parser"
x.report("parse - introspection") { GraphQL.parse(QUERY_STRING) }
x.report("parse - fragments") { GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
x.report("parse - big query") { GraphQL.parse(BIG_QUERY_STRING) }
else
raise("Unexpected task #{task}")
end
end
end
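# Print a MemoryProfiler report for parsing the three fixture query strings.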
def self.profile_parse
# To profile the C parser instead:
# require "graphql/c_parser"
report = MemoryProfiler.report do
GraphQL.parse(BIG_QUERY_STRING)
GraphQL.parse(QUERY_STRING)
GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING)
end
report.pretty_print
end
def self.validate_memory
FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY)
report = MemoryProfiler.report do
FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY)
nil
end
report.pretty_print
end
def self.profile
# Warm up any caches:
SCHEMA.execute(document: DOCUMENT)
# CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS)
res = nil
result = StackProf.run(mode: :wall) do
# CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS)
res = SCHEMA.execute(document: DOCUMENT)
end
StackProf::Report.new(result).print_text
end
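# Build a synthetic schema (5 interfaces, 100 object types, 10 unions) used by the boot and introspection profiles below.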
def self.build_large_schema
Class.new(GraphQL::Schema) do
query_t = Class.new(GraphQL::Schema::Object) do
graphql_name("Query")
int_ts = 5.times.map do |i|
int_t = Module.new do
include GraphQL::Schema::Interface
graphql_name "Interface#{i}"
5.times do |n2|
field :"field#{n2}", String do
argument :arg, String
end
end
end
field :"int_field_#{i}", int_t
int_t
end
obj_ts = 100.times.map do |n|
input_obj_t = Class.new(GraphQL::Schema::InputObject) do
graphql_name("Input#{n}")
argument :arg, String
end
obj_t = Class.new(GraphQL::Schema::Object) do
graphql_name("Object#{n}")
implements(*int_ts)
20.times do |n2|
field :"field#{n2}", String do
argument :input, input_obj_t
end
end
field :self_field, self
field :int_0_field, int_ts[0]
end
field :"rootfield#{n}", obj_t
obj_t
end
10.times do |n|
union_t = Class.new(GraphQL::Schema::Union) do
graphql_name "Union#{n}"
possible_types(*obj_ts.sample(10))
end
field :"unionfield#{n}", union_t
end
end
query(query_t)
end
end
def self.profile_boot
Benchmark.ips do |x|
x.config(time: 10)
x.report("Booting large schema") {
build_large_schema
}
end
result = StackProf.run(mode: :wall, interval: 1) do
build_large_schema
end
StackProf::Report.new(result).print_text
retained_schema = nil
report = MemoryProfiler.report do
retained_schema = build_large_schema
end
report.pretty_print
end
SILLY_LARGE_SCHEMA = build_large_schema
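# Benchmark, StackProf and memory-profile a minimal `{ __typename }` query against the large synthetic schema.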
def self.profile_small_query_on_large_schema
schema = Class.new(SILLY_LARGE_SCHEMA)
Benchmark.ips do |x|
x.report("Run small query") {
schema.execute("{ __typename }")
}
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.execute("{ __typename }")
end
StackProf::Report.new(result).print_text
StackProf.run(mode: :wall, out: "tmp/small_query.dump", interval: 1) do
schema.execute("{ __typename }")
end
report = MemoryProfiler.report do
schema.execute("{ __typename }")
end
puts "\n\n"
report.pretty_print
end
def self.profile_large_introspection
schema = SILLY_LARGE_SCHEMA
Benchmark.ips do |x|
x.config(time: 10)
x.report("Run large introspection") {
schema.to_json
}
end
result = StackProf.run(mode: :wall) do
schema.to_json
end
StackProf::Report.new(result).print_text
retained_schema = nil
report = MemoryProfiler.report do
schema.to_json
end
puts "\n\n"
report.pretty_print
end
def self.profile_large_analysis
query_str = "query {\n".dup
5.times do |n|
query_str << " intField#{n} { "
20.times do |o|
query_str << "...Obj#{o}Fields "
end
query_str << "}\n"
end
query_str << "}"
20.times do |o|
query_str << "fragment Obj#{o}Fields on Object#{o} { "
20.times do |f|
query_str << " field#{f}(arg: \"a\")\n"
end
query_str << " selfField { selfField { selfField { __typename } } }\n"
# query_str << " int0Field { ...Int0Fields }"
query_str << "}\n"
end
# query_str << "fragment Int0Fields on Interface0 { __typename }"
query = GraphQL::Query.new(SILLY_LARGE_SCHEMA, query_str)
analyzers = [
GraphQL::Analysis::AST::FieldUsage,
GraphQL::Analysis::AST::QueryDepth,
GraphQL::Analysis::AST::QueryComplexity
]
Benchmark.ips do |x|
x.report("Running introspection") {
GraphQL::Analysis::AST.analyze_query(query, analyzers)
}
end
StackProf.run(mode: :wall, out: "last-stackprof.dump", interval: 1) do
GraphQL::Analysis::AST.analyze_query(query, analyzers)
end
result = StackProf.run(mode: :wall, interval: 1) do
GraphQL::Analysis::AST.analyze_query(query, analyzers)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
GraphQL::Analysis::AST.analyze_query(query, analyzers)
end
puts "\n\n"
report.pretty_print
end
# Adapted from https://github.com/rmosolgo/graphql-ruby/issues/861
def self.profile_large_result
schema = ProfileLargeResult::Schema
document = ProfileLargeResult::ALL_FIELDS
Benchmark.ips do |x|
x.config(time: 10)
x.report("Querying for #{ProfileLargeResult::DATA.size} objects") {
schema.execute(document: document)
}
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.execute(document: document)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
schema.execute(document: document)
end
report.pretty_print
end
def self.profile_small_result
schema = ProfileLargeResult::Schema
document = GraphQL.parse <<-GRAPHQL
query {
foos(first: 5) {
__typename
id
int1
int2
string1
string2
foos(first: 5) {
__typename
string1
string2
foo {
__typename
int1
}
}
}
}
GRAPHQL
Benchmark.ips do |x|
x.config(time: 10)
x.report("Querying for #{ProfileLargeResult::DATA.size} objects") {
schema.execute(document: document)
}
end
StackProf.run(mode: :wall, interval: 1, out: "tmp/small.dump") do
schema.execute(document: document)
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.execute(document: document)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
schema.execute(document: document)
end
report.pretty_print
end
def self.profile_small_introspection
schema = ProfileLargeResult::Schema
document = GraphQL.parse(GraphQL::Introspection::INTROSPECTION_QUERY)
Benchmark.ips do |x|
x.config(time: 5)
x.report("Introspection") {
schema.execute(document: document)
}
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.execute(document: document)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
schema.execute(document: document)
end
report.pretty_print
end
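# Fixture schema and data: DATA_SIZE synthetic objects, wrapped in lazy procs unless ENV["EAGER"] is set.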
module ProfileLargeResult
def self.eager_or_proc(value)
ENV["EAGER"] ? value : -> { value }
end
DATA_SIZE = 1000
DATA = DATA_SIZE.times.map {
eager_or_proc({
id: SecureRandom.uuid,
int1: SecureRandom.random_number(100000),
int2: SecureRandom.random_number(100000),
string1: eager_or_proc(SecureRandom.base64),
string2: SecureRandom.base64,
boolean1: SecureRandom.random_number(1) == 0,
boolean2: SecureRandom.random_number(1) == 0,
int_array: eager_or_proc(10.times.map { eager_or_proc(SecureRandom.random_number(100000)) } ),
string_array: 10.times.map { SecureRandom.base64 },
boolean_array: 10.times.map { SecureRandom.random_number(1) == 0 },
})
}
module Bar
include GraphQL::Schema::Interface
field :string_array, [String], null: false
end
module Baz
include GraphQL::Schema::Interface
implements Bar
field :int_array, [Integer], null: false
field :boolean_array, [Boolean], null: false
end
class ExampleExtension < GraphQL::Schema::FieldExtension
end
class FooType < GraphQL::Schema::Object
implements Baz
field :id, ID, null: false, extensions: [ExampleExtension]
field :int1, Integer, null: false, extensions: [ExampleExtension]
field :int2, Integer, null: false, extensions: [ExampleExtension]
field :string1, String, null: false do
argument :arg1, String, required: false
argument :arg2, String, required: false
argument :arg3, String, required: false
argument :arg4, String, required: false
end
field :string2, String, null: false do
argument :arg1, String, required: false
argument :arg2, String, required: false
argument :arg3, String, required: false
argument :arg4, String, required: false
end
field :boolean1, Boolean, null: false do
argument :arg1, String, required: false
argument :arg2, String, required: false
argument :arg3, String, required: false
argument :arg4, String, required: false
end
field :boolean2, Boolean, null: false do
argument :arg1, String, required: false
argument :arg2, String, required: false
argument :arg3, String, required: false
argument :arg4, String, required: false
end
field :foos, [FooType], null: false, description: "Return a list of Foo objects" do
argument :first, Integer, default_value: DATA_SIZE
end
def foos(first:)
DATA.first(first)
end
field :foo, FooType
def foo
DATA.sample
end
end
class QueryType < GraphQL::Schema::Object
description "Query root of the system"
field :foos, [FooType], null: false, description: "Return a list of Foo objects" do
argument :first, Integer, default_value: DATA_SIZE
end
def foos(first:)
DATA.first(first)
end
end
class Schema < GraphQL::Schema
query QueryType
# use GraphQL::Dataloader
lazy_resolve Proc, :call
end
ALL_FIELDS = GraphQL.parse <<-GRAPHQL
query($skip: Boolean = false) {
foos {
id @skip(if: $skip)
int1
int2
string1
string2
boolean1
boolean2
stringArray
intArray
booleanArray
}
}
GRAPHQL
end
def self.profile_to_definition
require_relative "./batch_loading"
schema = ProfileLargeResult::Schema
schema.to_definition
Benchmark.ips do |x|
x.report("to_definition") { schema.to_definition }
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.to_definition
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
schema.to_definition
end
report.pretty_print
end
def self.profile_from_definition
# require "graphql/c_parser"
schema_str = SILLY_LARGE_SCHEMA.to_definition
Benchmark.ips do |x|
x.report("from_definition") { GraphQL::Schema.from_definition(schema_str) }
end
result = StackProf.run(mode: :wall, interval: 1) do
GraphQL::Schema.from_definition(schema_str)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
GraphQL::Schema.from_definition(schema_str)
end
report.pretty_print
end
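# Compare GraphQL::Batch, GraphQL::Dataloader and unbatched resolution; results are checked for equality before benchmarking.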
def self.profile_batch_loaders
require_relative "./batch_loading"
include BatchLoading
document = GraphQL.parse <<-GRAPHQL
{
braves: team(name: "Braves") { ...TeamFields }
bulls: team(name: "Bulls") { ...TeamFields }
}
fragment TeamFields on Team {
players {
team {
players {
team {
name
}
}
}
}
}
GRAPHQL
batch_result = GraphQLBatchSchema.execute(document: document).to_h
dataloader_result = GraphQLDataloaderSchema.execute(document: document).to_h
no_batch_result = GraphQLNoBatchingSchema.execute(document: document).to_h
results = [batch_result, dataloader_result, no_batch_result].uniq
if results.size > 1
puts "Batch result:"
pp batch_result
puts "Dataloader result:"
pp dataloader_result
puts "No-batch result:"
pp no_batch_result
raise "Got different results -- fix implementation before benchmarking."
end
Benchmark.ips do |x|
x.report("GraphQL::Batch") { GraphQLBatchSchema.execute(document: document) }
x.report("GraphQL::Dataloader") { GraphQLDataloaderSchema.execute(document: document) }
x.report("No Batching") { GraphQLNoBatchingSchema.execute(document: document) }
x.compare!
end
puts "========== GraphQL-Batch Memory =============="
report = MemoryProfiler.report do
GraphQLBatchSchema.execute(document: document)
end
report.pretty_print
puts "========== Dataloader Memory ================="
report = MemoryProfiler.report do
GraphQLDataloaderSchema.execute(document: document)
end
report.pretty_print
puts "========== No Batch Memory =============="
report = MemoryProfiler.report do
GraphQLNoBatchingSchema.execute(document: document)
end
report.pretty_print
end
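# Measure the memory footprint of building a schema with 100 object types and 100 Relay-classic mutations.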
def self.profile_schema_memory_footprint
schema = nil
report = MemoryProfiler.report do
query_type = Class.new(GraphQL::Schema::Object) do
graphql_name "Query"
100.times do |i|
type = Class.new(GraphQL::Schema::Object) do
graphql_name "Object#{i}"
field :f, Integer
end
field "f#{i}", type
end
end
thing_type = Class.new(GraphQL::Schema::Object) do
graphql_name "Thing"
field :name, String
end
mutation_type = Class.new(GraphQL::Schema::Object) do
graphql_name "Mutation"
100.times do |i|
mutation_class = Class.new(GraphQL::Schema::RelayClassicMutation) do
graphql_name "Do#{i}"
argument :id, "ID"
field :thing, thing_type
field :things, thing_type.connection_type
end
field "f#{i}", mutation: mutation_class
end
end
schema = Class.new(GraphQL::Schema) do
query(query_type)
mutation(mutation_type)
end
end
report.pretty_print
end
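# Schema for comparing Ruby call-stack depth under eager vs. lazy field resolution (see profile_stack_depth).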
class StackDepthSchema < GraphQL::Schema
class Thing < GraphQL::Schema::Object
field :thing, self do
argument :lazy, Boolean, default_value: false
end
def thing(lazy:)
if lazy
-> { :something }
else
:something
end
end
field :stack_trace_depth, Integer do
argument :lazy, Boolean, default_value: false
end
def stack_trace_depth(lazy:)
get_depth = -> {
graphql_caller = caller.select { |c| c.include?("graphql") }
graphql_caller.size
}
if lazy
get_depth
else
get_depth.call
end
end
end
class Query < GraphQL::Schema::Object
field :thing, Thing
def thing
:something
end
end
query(Query)
lazy_resolve(Proc, :call)
end
def self.profile_stack_depth
query_str = <<-GRAPHQL
query($lazyThing: Boolean!, $lazyStackTrace: Boolean!) {
thing {
thing(lazy: $lazyThing) {
thing(lazy: $lazyThing) {
thing(lazy: $lazyThing) {
thing(lazy: $lazyThing) {
stackTraceDepth(lazy: $lazyStackTrace)
}
}
}
}
}
}
GRAPHQL
eager_res = StackDepthSchema.execute(query_str, variables: { lazyThing: false, lazyStackTrace: false })
lazy_res = StackDepthSchema.execute(query_str, variables: { lazyThing: true, lazyStackTrace: false })
very_lazy_res = StackDepthSchema.execute(query_str, variables: { lazyThing: true, lazyStackTrace: true })
get_depth = ->(result) { result["data"]["thing"]["thing"]["thing"]["thing"]["thing"]["stackTraceDepth"] }
puts <<~RESULT
Result Depth
---------------------
Eager #{get_depth.call(eager_res)}
Lazy #{get_depth.call(lazy_res)}
Very Lazy #{get_depth.call(very_lazy_res)}
RESULT
end
end

View File

@ -1,118 +0,0 @@
# A big schema for testing
type Query {
node(id: ID!): Node
}
interface Node {
id: ID!
}
interface Node2 {
id: ID
}
interface Commentable {
id: ID!
comments: [Comment!]!
}
interface Named {
name: String!
}
type Comment implements Node {
author: Player
body: String!
id: ID!
}
type Card implements Node, Commentable, Node2, Named {
name: String!
converted_mana_cost: Int!
mana_cost: String!
colors: [Color!]!
power: Int
toughness: Int
rules_text: String!
id: ID!
comments: [Comment!]!
}
type Printing implements Node, Commentable, Node2 {
card: Card!
expansion: Expansion!
rarity: Rarity!
artist: Artist!
id: ID!
comments: [Comment!]!
}
type Expansion implements Node, Commentable, Named {
name: String!
code: String!
printings: [Printing!]!
block: Block!
id: ID!
comments: [Comment!]!
}
type Block implements Node, Commentable, Named {
name: String!
expansions: [Expansion!]!
id: ID!
comments: [Comment!]!
}
# Eg shard, guild, clan
type Watermark implements Node, Commentable, Named {
name: String!
cards: [Card!]!
colors: [Color!]!
id: ID!
comments: [Comment!]!
}
type Artist implements Node, Commentable, Named {
name: String!
printings: [Printing!]!
id: ID!
comments: [Comment!]!
}
type Player implements Node, Commentable, Named {
name: String!
decks: [Deck!]!
id: ID!
comments: [Comment!]!
}
type Deck implements Node, Commentable, Named {
name: String!
colors: [Color!]!
slots: [Slot!]!
id: ID!
comments: [Comment!]!
}
type Slot implements Node, Commentable {
deck: Deck!
card: Card!
id: ID!
comments: [Comment!]!
}
enum Color {
WHITE
BLUE
BLACK
RED
GREEN
COLORLESS
}
enum Rarity {
COMMON
UNCOMMON
RARE
MYTHIC_RARE
TIMESHIFTED
}

View File

@ -1,44 +0,0 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
module Development
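# Flags calls to context-dependent schema member methods (e.g. `get_field`, `fields`, `enum_values`) that don't pass `context`.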
class ContextIsPassedCop < RuboCop::Cop::Base
MSG = <<-MSG
This method also accepts `context` as an argument. Pass it so that the returned value will reflect the current query, or use another method that isn't context-dependent.
MSG
# These are already context-aware or else not query-related
def_node_matcher :likely_query_specific_receiver?, "
{
(send _ {:ast_node :query :context :warden :ctx :query_ctx :query_context})
(lvar {:ast_node :query :context :warden :ctx :query_ctx :query_context})
(ivar {:@query :@context :@warden})
(send _ {:introspection_system})
}
"
def_node_matcher :method_doesnt_receive_second_context_argument?, <<-MATCHER
(send _ {:get_field :get_argument :get_type} _)
MATCHER
def_node_matcher :method_doesnt_receive_first_context_argument?, <<-MATCHER
(send _ {:fields :arguments :types :enum_values})
MATCHER
def_node_matcher :is_enum_values_call_without_arguments?, "
(send (send _ {:enum :enum_type (ivar {:@enum :@enum_type})}) {:values})
"
def on_send(node)
if (
method_doesnt_receive_second_context_argument?(node) ||
method_doesnt_receive_first_context_argument?(node) ||
is_enum_values_call_without_arguments?(node)
) && !likely_query_specific_receiver?(node.to_a[0])
add_offense(node)
end
end
end
end
end

View File

@ -1,18 +0,0 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
module Development
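# Flags `module_eval`/`class_eval`/`instance_eval`, which accept strings; prefer the `_exec` variants with a block.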
class NoEvalCop < RuboCop::Cop::Base
MSG_TEMPLATE = "Don't use `%{eval_method_name}`, which accepts strings and may result in evaluating unexpected code. Use `%{exec_method_name}` instead, and pass a block."
def on_send(node)
case node.method_name
when :module_eval, :class_eval, :instance_eval
message = MSG_TEMPLATE % { eval_method_name: node.method_name, exec_method_name: node.method_name.to_s.sub("eval", "exec").to_sym }
add_offense node, message: message
end
end
end
end
end

View File

@ -1,21 +0,0 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
module Development
# Make sure no tests are focused, from https://github.com/rubocop-hq/rubocop/issues/3773#issuecomment-420662102
class NoFocusCop < RuboCop::Cop::Base
MSG = 'Remove `focus` from tests.'
def_node_matcher :focused?, <<-MATCHER
(send nil? :focus)
MATCHER
def on_send(node)
return unless focused?(node)
add_offense node
end
end
end
end

View File

@ -1,47 +0,0 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
module Development
# A custom Rubocop rule to catch uses of `.none?` without a block.
#
# @see https://github.com/rmosolgo/graphql-ruby/pull/2090
class NoneWithoutBlockCop < RuboCop::Cop::Base
MSG = <<-MD
Instead of `.none?` or `.any?` without a block:
- Use `.empty?` to check for an empty collection (faster)
- Add a block to explicitly check for `false` (more clear)
Run `-a` to replace this with `%{bang}.empty?`.
MD
def on_block(node)
# Since this method was called with a block, it can't be
# a case of `.none?` without a block
ignore_node(node.send_node)
end
def on_send(node)
if !ignored_node?(node) && (node.method_name == :none? || node.method_name == :any?) && node.arguments.size == 0
add_offense(node, message: MSG % { bang: node.method_name == :none? ? "" : "!.." } )
end
end
def autocorrect(node)
lambda do |corrector|
if node.method_name == :none?
corrector.replace(node.location.selector, "empty?")
else
# Backtrack to any chained method calls so we can insert `!` before them
full_exp = node
while full_exp.parent.send_type?
full_exp = full_exp.parent
end
new_source = "!" + full_exp.source_range.source.sub("any?", "empty?")
corrector.replace(full_exp, new_source)
end
end
end
end
end
end

View File

@ -1,93 +0,0 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
module Development
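# Ensures trace hook methods call `super` and that `*Trace` modules define the expected set of hooks.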
class TraceMethodsCop < RuboCop::Cop::Base
extend RuboCop::Cop::AutoCorrector
TRACE_HOOKS = [
:analyze_multiplex,
:analyze_query,
:authorized,
:authorized_lazy,
:begin_analyze_multiplex,
:begin_authorized,
:begin_dataloader,
:begin_dataloader_source,
:begin_execute_field,
:begin_execute_multiplex,
:begin_parse,
:begin_resolve_type,
:begin_validate,
:dataloader_fiber_exit,
:dataloader_fiber_resume,
:dataloader_fiber_yield,
:dataloader_spawn_execution_fiber,
:dataloader_spawn_source_fiber,
:end_analyze_multiplex,
:end_authorized,
:end_dataloader,
:end_dataloader_source,
:end_execute_field,
:end_execute_multiplex,
:end_parse,
:end_resolve_type,
:end_validate,
:execute_field,
:execute_field_lazy,
:execute_multiplex,
:execute_query,
:execute_query_lazy,
:lex,
:parse,
:resolve_type,
:resolve_type_lazy,
:validate,
]
MSG = "Trace methods should call `super` to pass control to other traces"
def on_def(node)
if TRACE_HOOKS.include?(node.method_name) && !node.each_descendant(:super, :zsuper).any?
add_offense(node) do |corrector|
if node.body
offset = node.loc.column + 2
corrector.insert_after(node.body.loc.expression, "\n#{' ' * offset}super")
end
end
end
end
def on_module(node)
if node.defined_module_name.to_s.end_with?("Trace")
all_defs = []
node.body.each_child_node do |body_node|
if body_node.def_type?
all_defs << body_node.method_name
end
end
missing_defs = TRACE_HOOKS - all_defs
redundant_defs = [
# Not really necessary for making a good trace:
:lex, :analyze_query, :execute_query, :execute_query_lazy,
# Only useful for isolated event tracking:
:dataloader_fiber_exit, :dataloader_spawn_execution_fiber, :dataloader_spawn_source_fiber
]
missing_defs.each do |missing_def|
if all_defs.include?(:"begin_#{missing_def}") && all_defs.include?(:"end_#{missing_def}")
redundant_defs << missing_def
redundant_defs << :"#{missing_def}_lazy"
end
end
missing_defs -= redundant_defs
if missing_defs.any?
add_offense(node, message: "Missing some trace hook methods:\n\n- #{missing_defs.join("\n- ")}")
end
end
end
end
end
end

View File

@ -1,17 +0,0 @@
# This file was generated by Appraisal
source "https://rubygems.org"
gem 'logger'
gem "bootsnap"
gem "ruby-prof", platform: :ruby
gem "pry"
gem "pry-stack_explorer", platform: :ruby
gem "mongoid", "~> 8.0"
gem "libev_scheduler"
gem "evt"
gem "async"
gem "fiber-storage"
gem "concurrent-ruby", "1.3.4"
gemspec path: "../"

Some files were not shown because too many files have changed in this diff