Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-03-12 21:10:21 +00:00
parent 5c0dffc552
commit 42e5a8bf0b
1367 changed files with 197316 additions and 392 deletions

View File

@ -468,7 +468,6 @@ RSpec/BeEq:
- 'ee/spec/services/projects/restore_service_spec.rb'
- 'ee/spec/services/projects/update_service_spec.rb'
- 'ee/spec/services/quick_actions/interpret_service_spec.rb'
- 'ee/spec/services/search/elastic/cluster_reindexing_service_spec.rb'
- 'ee/spec/services/search/project_service_spec.rb'
- 'ee/spec/services/search/zoekt/indexing_task_service_spec.rb'
- 'ee/spec/services/security/configuration/project_set_continuous_vulnerability_scanning_service_spec.rb'

View File

@ -2950,7 +2950,6 @@ RSpec/FeatureCategory:
- 'spec/lib/gitlab/uploads_transfer_spec.rb'
- 'spec/lib/gitlab/url_blockers/domain_allowlist_entry_spec.rb'
- 'spec/lib/gitlab/url_builder_spec.rb'
- 'spec/lib/gitlab/url_sanitizer_spec.rb'
- 'spec/lib/gitlab/usage/metric_spec.rb'
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb'
- 'spec/lib/gitlab/usage/metrics/instrumentations/active_user_count_metric_spec.rb'

View File

@ -154,7 +154,6 @@ RSpec/ReceiveMessages:
- 'ee/spec/services/merge_requests/mergeability/check_jira_status_service_spec.rb'
- 'ee/spec/services/merge_requests/mergeability/check_path_locks_service_spec.rb'
- 'ee/spec/services/resource_access_tokens/create_service_spec.rb'
- 'ee/spec/services/search/elastic/cluster_reindexing_service_spec.rb'
- 'ee/spec/services/search/project_service_spec.rb'
- 'ee/spec/services/security/orchestration/assign_service_spec.rb'
- 'ee/spec/services/security/scan_result_policies/generate_policy_violation_comment_service_spec.rb'

View File

@ -2,6 +2,25 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 17.9.2 (2025-03-11)
### Fixed (3 changes)
- [Fix the pipe search for zoekt exact search mode](https://gitlab.com/gitlab-org/security/gitlab/-/commit/22030051963d488575df6a2cca19e1d4e0d82a2d) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/183373))
- [Prevent deletion of project_bot users with non-expiring access tokens](https://gitlab.com/gitlab-org/security/gitlab/-/commit/0249d460f0f6b7f867e7a4171e0effae2e4e5135)
- [Use correct project when fetching managed resources templates](https://gitlab.com/gitlab-org/security/gitlab/-/commit/9bac9ffcd01abff2bf0d4d38829655e394dea3b3)
### Security (8 changes)
- [Vendor GraphQL gem](https://gitlab.com/gitlab-org/security/gitlab/-/commit/e78a52dcafa4c7316e01d4dcd35a33ab9623f532) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4841))
- [Ruby-SAML updates](https://gitlab.com/gitlab-org/security/gitlab/-/commit/780f136abbf4b27c18485e5c173aaa18acfcb644) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4828))
- [Prevent custom role to activate group members](https://gitlab.com/gitlab-org/security/gitlab/-/commit/efbd1afae3cce6bb4e03e91ed82b7af27671977f) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4833))
- [approval_rules name is unbounded](https://gitlab.com/gitlab-org/security/gitlab/-/commit/9a4e857ea3223fe56536a81f3d1fdadb833eca8d) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4838))
- [Limit preview size](https://gitlab.com/gitlab-org/security/gitlab/-/commit/b2ce61725c9946766a2a964377fc756d87e8234e) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4821))
- [Maintainer can inject shell code in Google integration configurations that...](https://gitlab.com/gitlab-org/security/gitlab/-/commit/7fd30226d79520b742abd9edc01055766f56c39b) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4824))
- [Filter out internal notes if the recipient do not have access](https://gitlab.com/gitlab-org/security/gitlab/-/commit/a55e5b359912daa90ef07c73ebd66b4bb9b0e2c3) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4815))
- [Mask unencoded userinfo in project mirror errors](https://gitlab.com/gitlab-org/security/gitlab/-/commit/af6f445b66c3353652c47ad36b1c700c2c0572a1) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4789))
## 17.9.1 (2025-02-26)
### Fixed (1 change)
@ -1096,6 +1115,19 @@ entry.
- [Quarantine a flaky test](https://gitlab.com/gitlab-org/gitlab/-/commit/c932e35efdc0e3c6f316a3c2d37045e115ce8cd5) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176452))
- [Finalize migration BackfillRemoteDevelopmentAgentConfigsProjectId](https://gitlab.com/gitlab-org/gitlab/-/commit/da4c63d7aab3685c3fbe9d1e48f68ba2162a0b5e) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/172769))
## 17.8.5 (2025-03-11)
### Security (8 changes)
- [Vendor GraphQL gem](https://gitlab.com/gitlab-org/security/gitlab/-/commit/1d1019ef7f1f447b9438221ff734f07cb8cfc6f7) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4840))
- [Ruby-SAML updates](https://gitlab.com/gitlab-org/security/gitlab/-/commit/f68b16e4319f01144c90b725e6ec62b3c2ca42bd) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4829))
- [Prevent custom role to activate group members](https://gitlab.com/gitlab-org/security/gitlab/-/commit/a1d818293e11796c71553e2c5eaba4e2f358db79) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4834))
- [approval_rules name is unbounded](https://gitlab.com/gitlab-org/security/gitlab/-/commit/89018f0df2df5c9b3c656a0981384ee3872a54e1) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4837))
- [Limit preview size](https://gitlab.com/gitlab-org/security/gitlab/-/commit/9824f5746a38e95dbfa5d991b69c18ec3e5c1194) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4822))
- [Maintainer can inject shell code in Google integration configurations that...](https://gitlab.com/gitlab-org/security/gitlab/-/commit/688e445b811a4ffa2e2a175effe4f211af57afe8) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4825))
- [Filter out internal notes if the recipient do not have access](https://gitlab.com/gitlab-org/security/gitlab/-/commit/117de613cd4025a41c93a25809dd4c5bd47df7b4) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4814))
- [Mask unencoded userinfo in project mirror errors](https://gitlab.com/gitlab-org/security/gitlab/-/commit/73a0c85541ef22125c52be1b4980ced1bfbc1dba) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4777))
## 17.8.4 (2025-02-26)
### Security (4 changes)
@ -1596,6 +1628,19 @@ entry.
- [Remove default on `group_saved_replies_flag feature flag](https://gitlab.com/gitlab-org/gitlab/-/commit/75d49fe13646e1e0d3b68233ac4a965c86853917) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175647))
- [Remove use_actual_plan_in_license_check flag](https://gitlab.com/gitlab-org/gitlab/-/commit/b8c3fe16aedb69c82ff52d1c695d72e933c4b946) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175649))
## 17.7.7 (2025-03-11)
### Security (8 changes)
- [Vendor GraphQL gem](https://gitlab.com/gitlab-org/security/gitlab/-/commit/1c7e70de86d4bdd9395c9bcda9f816a608d78a3d) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4839))
- [Ruby-SAML updates](https://gitlab.com/gitlab-org/security/gitlab/-/commit/8ac8f914d289f182de00be49d10f15e7db7bd7a3) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4830))
- [Prevent custom role to activate group members](https://gitlab.com/gitlab-org/security/gitlab/-/commit/1c01e25b256b30a8edca3024909c9b1df56adb59) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4835))
- [approval_rules name is unbounded](https://gitlab.com/gitlab-org/security/gitlab/-/commit/1db850c51fcd8ceab50bced39165464c0943a45b) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4836))
- [Limit preview size](https://gitlab.com/gitlab-org/security/gitlab/-/commit/0ca0f0bad8d2d470b10491fa672e3134cb0d5acf) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4823))
- [Maintainer can inject shell code in Google integration configurations that...](https://gitlab.com/gitlab-org/security/gitlab/-/commit/f22e6beb143b983e2bd76c08beff215816a52079) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4826))
- [Filter out internal notes if the recipient do not have access](https://gitlab.com/gitlab-org/security/gitlab/-/commit/17181360562c5f5b9c4907eb8f8d3aad12b7748b) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4813))
- [Mask unencoded userinfo in project mirror errors](https://gitlab.com/gitlab-org/security/gitlab/-/commit/a7a51b42149a2db1c39a948604c2296c61b1ddb9) ([merge request](https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/4778))
## 17.7.6 (2025-02-26)
### Fixed (1 change)

View File

@ -95,7 +95,7 @@ gem 'doorkeeper', '~> 5.8', '>= 5.8.1', feature_category: :system_access
gem 'doorkeeper-openid_connect', '~> 1.8.10', feature_category: :system_access
gem 'doorkeeper-device_authorization_grant', '~> 1.0.0', feature_category: :system_access
gem 'rexml', '~> 3.4.0', feature_category: :shared
gem 'ruby-saml', '~> 1.17.0', feature_category: :system_access
gem 'ruby-saml', '~> 1.18.0', path: 'vendor/gems/ruby-saml', feature_category: :system_access
gem 'omniauth', '~> 2.1.0', feature_category: :system_access
gem 'omniauth-auth0', '~> 3.1', feature_category: :system_access
gem 'omniauth-azure-activedirectory-v2', '~> 2.0', feature_category: :system_access
@ -164,7 +164,7 @@ gem 'grape-path-helpers', '~> 2.0.1', feature_category: :api
gem 'rack-cors', '~> 2.0.1', require: 'rack/cors', feature_category: :shared
# GraphQL API
gem 'graphql', '~> 2.4.1', feature_category: :api
gem 'graphql', '2.4.11', path: 'vendor/gems/graphql', feature_category: :api
gem 'graphql-docs', '~> 5.0.0', group: [:development, :test], feature_category: :api
gem 'apollo_upload_server', '~> 2.1.6', feature_category: :api
gem 'graphlient', '~> 0.8.0', feature_category: :importers # Used by BulkImport feature (group::import)

View File

@ -294,7 +294,6 @@
{"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
{"name":"graphlient","version":"0.8.0","platform":"ruby","checksum":"98c408da1d083454e9f5e274f3b0b6261e2a0c2b5f2ed7b3ef9441d46f8e7cb1"},
{"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
{"name":"graphql","version":"2.4.8","platform":"ruby","checksum":"f6ff824c5e476af06fe47179a72f865336abfabfaca3bd6fb58e3d196e3f6fb5"},
{"name":"graphql-client","version":"0.23.0","platform":"ruby","checksum":"f238b8e451676baad06bd15f95396e018192243dcf12c4e6d13fb41d9a2babc1"},
{"name":"graphql-docs","version":"5.0.0","platform":"ruby","checksum":"76baca6e5a803a4b6a9fbbbfdbf16742b7c4c546c8592b6e1a7aa4e79e562d04"},
{"name":"grpc","version":"1.63.0","platform":"aarch64-linux","checksum":"dc75c5fd570b819470781d9512105dddfdd11d984f38b8e60bb946f92d1f79ee"},
@ -639,7 +638,6 @@
{"name":"ruby-lsp-rspec","version":"0.1.22","platform":"ruby","checksum":"e982edf5cd6ec1530c3f5fa7e423624ad00532ebeff7fc94e02c7516a9b759c0"},
{"name":"ruby-magic","version":"0.6.0","platform":"ruby","checksum":"7b2138877b7d23aff812c95564eba6473b74b815ef85beb0eb792e729a2b6101"},
{"name":"ruby-progressbar","version":"1.11.0","platform":"ruby","checksum":"cc127db3866dc414ffccbf92928a241e585b3aa2b758a5563e74a6ee0f57d50a"},
{"name":"ruby-saml","version":"1.17.0","platform":"ruby","checksum":"0419839ba3312d255e35fe3cc7ae155e4a241fd468796caebcf61164aa01b8a9"},
{"name":"ruby-statistics","version":"4.1.0","platform":"ruby","checksum":"7d697abd5dc4e6141d21ecb4165482807564f11bbe154cf1c60a2677b507f2a9"},
{"name":"ruby2_keywords","version":"0.0.5","platform":"ruby","checksum":"ffd13740c573b7301cf7a2e61fc857b2a8e3d3aff32545d6f8300d8bae10e3ef"},
{"name":"rubyntlm","version":"0.6.3","platform":"ruby","checksum":"5b321456dba3130351f7451f8669f1afa83a0d26fd63cdec285b7b88e667102d"},

View File

@ -170,6 +170,14 @@ PATH
google-protobuf (~> 3)
grpc
PATH
remote: vendor/gems/graphql
specs:
graphql (2.4.11)
base64
fiber-storage
logger
PATH
remote: vendor/gems/mail-smtp_pool
specs:
@ -206,6 +214,13 @@ PATH
nokogiri (>= 1.4.4)
omniauth (~> 2.0)
PATH
remote: vendor/gems/ruby-saml
specs:
ruby-saml (1.18.0)
nokogiri (>= 1.13.10)
rexml
PATH
remote: vendor/gems/sidekiq-7.2.4
specs:
@ -941,9 +956,6 @@ GEM
faraday (~> 2.0)
graphql-client
graphlyte (1.0.0)
graphql (2.4.8)
base64
fiber-storage
graphql-client (0.23.0)
activesupport (>= 3.0)
graphql (>= 1.13.0)
@ -1707,9 +1719,6 @@ GEM
ruby-magic (0.6.0)
mini_portile2 (~> 2.8)
ruby-progressbar (1.11.0)
ruby-saml (1.17.0)
nokogiri (>= 1.13.10)
rexml
ruby-statistics (4.1.0)
ruby2_keywords (0.0.5)
rubyntlm (0.6.3)
@ -2153,7 +2162,7 @@ DEPENDENCIES
grape_logging (~> 1.8, >= 1.8.4)
graphlient (~> 0.8.0)
graphlyte (~> 1.0.0)
graphql (~> 2.4.1)
graphql (= 2.4.11)!
graphql-docs (~> 5.0.0)
grpc (= 1.63.0)
gssapi (~> 1.3.1)
@ -2303,7 +2312,7 @@ DEPENDENCIES
ruby-lsp-rspec (~> 0.1.10)
ruby-magic (~> 0.6)
ruby-progressbar (~> 1.10)
ruby-saml (~> 1.17.0)
ruby-saml (~> 1.18.0)!
rubyzip (~> 2.3.2)
rugged (~> 1.6)
sanitize (~> 6.0.2)

View File

@ -294,7 +294,6 @@
{"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
{"name":"graphlient","version":"0.8.0","platform":"ruby","checksum":"98c408da1d083454e9f5e274f3b0b6261e2a0c2b5f2ed7b3ef9441d46f8e7cb1"},
{"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
{"name":"graphql","version":"2.4.8","platform":"ruby","checksum":"f6ff824c5e476af06fe47179a72f865336abfabfaca3bd6fb58e3d196e3f6fb5"},
{"name":"graphql-client","version":"0.23.0","platform":"ruby","checksum":"f238b8e451676baad06bd15f95396e018192243dcf12c4e6d13fb41d9a2babc1"},
{"name":"graphql-docs","version":"5.0.0","platform":"ruby","checksum":"76baca6e5a803a4b6a9fbbbfdbf16742b7c4c546c8592b6e1a7aa4e79e562d04"},
{"name":"grpc","version":"1.63.0","platform":"aarch64-linux","checksum":"dc75c5fd570b819470781d9512105dddfdd11d984f38b8e60bb946f92d1f79ee"},
@ -649,7 +648,6 @@
{"name":"ruby-lsp-rspec","version":"0.1.22","platform":"ruby","checksum":"e982edf5cd6ec1530c3f5fa7e423624ad00532ebeff7fc94e02c7516a9b759c0"},
{"name":"ruby-magic","version":"0.6.0","platform":"ruby","checksum":"7b2138877b7d23aff812c95564eba6473b74b815ef85beb0eb792e729a2b6101"},
{"name":"ruby-progressbar","version":"1.11.0","platform":"ruby","checksum":"cc127db3866dc414ffccbf92928a241e585b3aa2b758a5563e74a6ee0f57d50a"},
{"name":"ruby-saml","version":"1.17.0","platform":"ruby","checksum":"0419839ba3312d255e35fe3cc7ae155e4a241fd468796caebcf61164aa01b8a9"},
{"name":"ruby-statistics","version":"4.1.0","platform":"ruby","checksum":"7d697abd5dc4e6141d21ecb4165482807564f11bbe154cf1c60a2677b507f2a9"},
{"name":"ruby2_keywords","version":"0.0.5","platform":"ruby","checksum":"ffd13740c573b7301cf7a2e61fc857b2a8e3d3aff32545d6f8300d8bae10e3ef"},
{"name":"rubyntlm","version":"0.6.3","platform":"ruby","checksum":"5b321456dba3130351f7451f8669f1afa83a0d26fd63cdec285b7b88e667102d"},

View File

@ -170,6 +170,14 @@ PATH
google-protobuf (~> 3)
grpc
PATH
remote: vendor/gems/graphql
specs:
graphql (2.4.11)
base64
fiber-storage
logger
PATH
remote: vendor/gems/mail-smtp_pool
specs:
@ -206,6 +214,13 @@ PATH
nokogiri (>= 1.4.4)
omniauth (~> 2.0)
PATH
remote: vendor/gems/ruby-saml
specs:
ruby-saml (1.18.0)
nokogiri (>= 1.13.10)
rexml
PATH
remote: vendor/gems/sidekiq-7.2.4
specs:
@ -953,9 +968,6 @@ GEM
faraday (~> 2.0)
graphql-client
graphlyte (1.0.0)
graphql (2.4.8)
base64
fiber-storage
graphql-client (0.23.0)
activesupport (>= 3.0)
graphql (>= 1.13.0)
@ -1739,9 +1751,6 @@ GEM
ruby-magic (0.6.0)
mini_portile2 (~> 2.8)
ruby-progressbar (1.11.0)
ruby-saml (1.17.0)
nokogiri (>= 1.13.10)
rexml
ruby-statistics (4.1.0)
ruby2_keywords (0.0.5)
rubyntlm (0.6.3)
@ -2187,7 +2196,7 @@ DEPENDENCIES
grape_logging (~> 1.8, >= 1.8.4)
graphlient (~> 0.8.0)
graphlyte (~> 1.0.0)
graphql (~> 2.4.1)
graphql (= 2.4.11)!
graphql-docs (~> 5.0.0)
grpc (= 1.63.0)
gssapi (~> 1.3.1)
@ -2337,7 +2346,7 @@ DEPENDENCIES
ruby-lsp-rspec (~> 0.1.10)
ruby-magic (~> 0.6)
ruby-progressbar (~> 1.10)
ruby-saml (~> 1.17.0)
ruby-saml (~> 1.18.0)!
rubyzip (~> 2.3.2)
rugged (~> 1.6)
sanitize (~> 6.0.2)

View File

@ -388,9 +388,12 @@ export const PROJECT_FILES_GO_BACK = {
defaultKeys: ['esc'],
};
const { blobOverflowMenu } = gon.features ?? {};
export const PROJECT_FILES_GO_TO_PERMALINK = {
id: 'projectFiles.goToFilePermalink',
description: __('Go to file permalink (while viewing a file)'),
description: blobOverflowMenu
? __('Copy file permalink')
: __('Go to file permalink (while viewing a file)'),
defaultKeys: ['y'],
};

View File

@ -3,6 +3,12 @@ import { moveToFilePermalink } from '~/blob/utils';
export default class ShortcutsBlob {
constructor(shortcuts) {
const { blobOverflowMenu } = gon.features ?? {};
if (blobOverflowMenu) {
// TODO: Remove ShortcutsBlob entirely once these feature flags are removed.
return;
}
shortcuts.add(PROJECT_FILES_GO_TO_PERMALINK, moveToFilePermalink);
}
}

View File

@ -1,4 +1,5 @@
import $ from 'jquery';
import { __ } from '~/locale';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base';
import { FileTemplateExtension } from '~/editor/extensions/source_editor_file_template_ext';
@ -8,6 +9,7 @@ import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { addEditorMarkdownListeners } from '~/lib/utils/text_markdown';
import FilepathFormMediator from '~/blob/filepath_form_mediator';
import { HTTP_STATUS_PAYLOAD_TOO_LARGE } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
import Api from '~/api';
@ -163,6 +165,18 @@ export default class EditBlob {
}
}
static createBlobAlert = (error) => {
if (error.response.status === HTTP_STATUS_PAYLOAD_TOO_LARGE) {
createAlert({
message: __('The blob is too large to render'),
});
} else {
createAlert({
message: BLOB_PREVIEW_ERROR,
});
}
};
editModeLinkClickHandler(e) {
e.preventDefault();
@ -191,11 +205,9 @@ export default class EditBlob {
currentPane.empty().append(data);
renderGFM(currentPane.get(0));
})
.catch(() =>
createAlert({
message: BLOB_PREVIEW_ERROR,
}),
);
.catch((error) => {
EditBlob.createBlobAlert(error);
});
}
}

View File

@ -31,7 +31,6 @@ export default {
data() {
return {
loadOnClick: true,
presenterPreview: null,
presenterComponent: null,
error: {

View File

@ -46,6 +46,7 @@ export default {
<template>
<gl-link
ref="reference"
class="gl-text-strong"
:class="`gfm gfm-${type}`"
:data-original="`${data.reference}+`"
:data-reference-type="type"

View File

@ -2,6 +2,7 @@
import { GlIcon, GlIntersperse, GlLink, GlSprintf, GlSkeletonLoader } from '@gitlab/ui';
import { helpPagePath } from '~/helpers/help_page_helper';
import { __ } from '~/locale';
import CrudComponent from '~/vue_shared/components/crud_component.vue';
export default {
name: 'ListPresenter',
@ -11,6 +12,7 @@ export default {
GlLink,
GlSprintf,
GlSkeletonLoader,
CrudComponent,
},
inject: ['presenter'],
props: {
@ -37,11 +39,14 @@ export default {
},
},
computed: {
title() {
return this.config.title || __('GLQL list');
},
items() {
return this.data.nodes || [];
},
fields() {
return this.config.fields;
return this.config.fields?.filter((item) => item.key !== 'title');
},
docsPath() {
return `${helpPagePath('user/glql/_index')}#glql-views`;
@ -53,8 +58,14 @@ export default {
};
</script>
<template>
<div class="gl-mb-4">
<component :is="listType" class="!gl-mb-1" data-testid="list">
<crud-component
:title="title"
:description="config.description"
:count="items.length"
is-collapsible
class="!gl-mt-5"
>
<component :is="listType" class="content-list !gl-mb-0" data-testid="list">
<template v-if="isPreview">
<li v-for="i in 5" :key="i">
<gl-skeleton-loader :width="400" :lines="1" />
@ -64,8 +75,13 @@ export default {
<li
v-for="(item, itemIndex) in items"
:key="itemIndex"
class="gl-py-3"
:class="{ 'gl-border-b gl-border-b-section': itemIndex !== items.length - 1 }"
:data-testid="`list-item-${itemIndex}`"
>
<h3 class="!gl-heading-5 !gl-mb-1">
<component :is="presenter.forField(item, 'title')" />
</h3>
<gl-intersperse separator=" · ">
<span v-for="field in fields" :key="field.key">
<component :is="presenter.forField(item, field.key)" />
@ -77,16 +93,16 @@ export default {
{{ __('No data found for this query') }}
</div>
</component>
<div
class="gl-mt-3 gl-flex gl-items-center gl-gap-1 gl-text-sm gl-text-subtle"
data-testid="footer"
>
<gl-icon class="gl-mb-1 gl-mr-1" :size="12" name="tanuki" />
<gl-sprintf :message="$options.i18n.generatedMessage">
<template #link="{ content }">
<gl-link :href="docsPath" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</div>
</div>
<template #footer>
<div class="gl-flex gl-items-center gl-gap-1 gl-text-sm gl-text-subtle" data-testid="footer">
<gl-icon class="gl-mb-1 gl-mr-1" :size="12" name="tanuki" />
<gl-sprintf :message="$options.i18n.generatedMessage">
<template #link="{ content }">
<gl-link :href="docsPath" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</div>
</template>
</crud-component>
</template>

View File

@ -2,6 +2,7 @@
import { GlIcon, GlLink, GlSprintf, GlSkeletonLoader } from '@gitlab/ui';
import { helpPagePath } from '~/helpers/help_page_helper';
import { __ } from '~/locale';
import CrudComponent from '~/vue_shared/components/crud_component.vue';
import Sorter from '../../core/sorter';
import ThResizable from '../common/th_resizable.vue';
@ -13,6 +14,7 @@ export default {
GlSprintf,
GlSkeletonLoader,
ThResizable,
CrudComponent,
},
inject: ['presenter'],
props: {
@ -39,11 +41,13 @@ export default {
items,
fields: this.config.fields,
sorter: new Sorter(items),
table: null,
};
},
computed: {
title() {
return this.config.title || __('GLQL table');
},
docsPath() {
return `${helpPagePath('user/glql/_index')}#glql-views`;
},
@ -59,61 +63,71 @@ export default {
};
</script>
<template>
<div class="gl-table-shadow !gl-my-4">
<table ref="table" class="!gl-mb-2 !gl-mt-0 gl-overflow-y-hidden">
<thead>
<tr v-if="table">
<th-resizable v-for="(field, fieldIndex) in fields" :key="field.key" :table="table">
<div
:data-testid="`column-${fieldIndex}`"
class="gl-cursor-pointer"
@click="sorter.sortBy(field.key)"
<crud-component
:title="title"
:description="config.description"
:count="items.length"
is-collapsible
class="!gl-mt-5 gl-overflow-hidden"
body-class="!gl-m-[-1px] !gl-p-0"
footer-class="!gl-border-t-0"
>
<div class="gl-table-shadow">
<table ref="table" class="!gl-my-0 gl-overflow-y-hidden">
<thead class="gl-text-sm">
<tr v-if="table">
<th-resizable v-for="(field, fieldIndex) in fields" :key="field.key" :table="table">
<div
:data-testid="`column-${fieldIndex}`"
class="gl-cursor-pointer"
@click="sorter.sortBy(field.key)"
>
{{ field.label }}
<gl-icon
v-if="sorter.options.fieldName === field.key"
:name="sorter.options.ascending ? 'arrow-up' : 'arrow-down'"
/>
</div>
</th-resizable>
</tr>
</thead>
<tbody class="!gl-bg-subtle">
<template v-if="isPreview">
<tr v-for="i in 5" :key="i">
<td v-for="field in fields" :key="field.key">
<gl-skeleton-loader :width="120" :lines="1" />
</td>
</tr>
</template>
<template v-else-if="items.length">
<tr
v-for="(item, itemIndex) in items"
:key="item.id"
:data-testid="`table-row-${itemIndex}`"
>
{{ field.label }}
<gl-icon
v-if="sorter.options.fieldName === field.key"
:name="sorter.options.ascending ? 'arrow-up' : 'arrow-down'"
/>
</div>
</th-resizable>
</tr>
</thead>
<tbody>
<template v-if="isPreview">
<tr v-for="i in 5" :key="i">
<td v-for="field in fields" :key="field.key">
<gl-skeleton-loader :width="120" :lines="1" />
<td v-for="field in fields" :key="field.key">
<component :is="presenter.forField(item, field.key)" />
</td>
</tr>
</template>
<tr v-else-if="!items.length">
<td :colspan="fields.length" class="gl-text-center">
{{ __('No data found for this query') }}
</td>
</tr>
</template>
<template v-else-if="items.length">
<tr
v-for="(item, itemIndex) in items"
:key="item.id"
:data-testid="`table-row-${itemIndex}`"
>
<td v-for="field in fields" :key="field.key">
<component :is="presenter.forField(item, field.key)" />
</td>
</tr>
</template>
<tr v-else-if="!items.length">
<td :colspan="fields.length" class="gl-text-center">
{{ __('No data found for this query') }}
</td>
</tr>
</tbody>
</table>
<div
class="gl-mt-3 gl-flex gl-items-center gl-gap-1 gl-text-sm gl-text-subtle"
data-testid="footer"
>
<gl-icon class="gl-mb-1 gl-mr-1" :size="12" name="tanuki" />
<gl-sprintf :message="$options.i18n.generatedMessage">
<template #link="{ content }">
<gl-link :href="docsPath" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</tbody>
</table>
</div>
</div>
<template #footer>
<div class="gl-flex gl-items-center gl-gap-1 gl-text-sm gl-text-subtle" data-testid="footer">
<gl-icon class="gl-mb-1 gl-mr-1" :size="12" name="tanuki" />
<gl-sprintf :message="$options.i18n.generatedMessage">
<template #link="{ content }">
<gl-link :href="docsPath" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</div>
</template>
</crud-component>
</template>

View File

@ -38,7 +38,6 @@ const presentersByFieldName = {
const presentersByDisplayType = {
list: ListPresenter,
orderedList: ListPresenter,
table: TablePresenter,
};

View File

@ -1,4 +1,5 @@
import { omitBy } from 'lodash';
import { nextTick } from 'vue';
import Api from '~/api';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
@ -23,6 +24,7 @@ import {
prepareSearchAggregations,
setDataToLS,
skipBlobESCount,
buildDocumentTitle,
} from './utils';
export const fetchGroups = ({ commit }, search) => {
@ -103,7 +105,45 @@ export const setFrequentProject = ({ state, commit }, item) => {
commit(types.LOAD_FREQUENT_ITEMS, { key: PROJECTS_LOCAL_STORAGE_KEY, data: frequentItems });
};
export const setQuery = ({ state, commit, getters }, { key, value }) => {
export const fetchSidebarCount = ({ commit, state }) => {
const items = Object.values(state.navigation)
.filter(
(navigationItem) =>
!navigationItem.active &&
navigationItem.count_link &&
skipBlobESCount(state, navigationItem.scope),
)
.map((navItem) => {
const navigationItem = { ...navItem };
const modifications = {
search: state.query?.search || '*',
};
if (navigationItem.scope === SCOPE_BLOB && loadDataFromLS(LS_REGEX_HANDLE)) {
modifications[REGEX_PARAM] = true;
}
navigationItem.count_link = setUrlParams(
modifications,
getNormalizedURL(navigationItem.count_link),
);
return navigationItem;
});
const promises = items.map((navigationItem) =>
axios
.get(navigationItem.count_link)
.then(({ data: { count } }) => {
commit(types.RECEIVE_NAVIGATION_COUNT, { key: navigationItem.scope, count });
})
.catch((e) => logError(e)),
);
return Promise.all(promises);
};
export const setQuery = async ({ state, commit, getters }, { key, value }) => {
commit(types.SET_QUERY, { key, value });
if (SIDEBAR_PARAMS.includes(key)) {
@ -117,10 +157,14 @@ export const setQuery = ({ state, commit, getters }, { key, value }) => {
if (
state.searchType === SEARCH_TYPE_ZOEKT &&
getters.currentScope === SCOPE_BLOB &&
gon.features.zoektMultimatchFrontend
gon.features?.zoektMultimatchFrontend
) {
const newUrl = setUrlParams({ ...state.query }, window.location.href, false, true);
updateHistory({ state: state.query, url: newUrl, replace: true });
document.title = buildDocumentTitle(state.query.search);
updateHistory({ state: state.query, title: state.query.search, url: newUrl, replace: false });
await nextTick();
fetchSidebarCount({ state, commit });
}
};
@ -148,53 +192,16 @@ export const resetQuery = ({ state }) => {
);
};
export const closeLabel = ({ state, commit }, { title }) => {
const labels = state?.query?.[LABEL_FILTER_PARAM].filter((labelName) => labelName !== title);
setQuery({ state, commit }, { key: LABEL_FILTER_PARAM, value: labels });
export const closeLabel = ({ state, commit, getters }, { title }) => {
const labels =
state?.query?.[LABEL_FILTER_PARAM]?.filter((labelName) => labelName !== title) || [];
setQuery({ state, commit, getters }, { key: LABEL_FILTER_PARAM, value: labels });
};
export const setLabelFilterSearch = ({ commit }, { value }) => {
commit(types.SET_LABEL_SEARCH_STRING, value);
};
export const fetchSidebarCount = ({ commit, state }) => {
const items = Object.values(state.navigation)
.filter(
(navigationItem) =>
!navigationItem.active &&
navigationItem.count_link &&
skipBlobESCount(state, navigationItem.scope),
)
.map((navItem) => {
const navigationItem = { ...navItem };
const modifications = {
search: state.query?.search || '*',
};
if (navigationItem.scope === SCOPE_BLOB && loadDataFromLS(LS_REGEX_HANDLE)) {
modifications[REGEX_PARAM] = true;
}
navigationItem.count_link = setUrlParams(
modifications,
getNormalizedURL(navigationItem.count_link),
);
return navigationItem;
});
const promises = items.map((navigationItem) =>
axios
.get(navigationItem.count_link)
.then(({ data: { count } }) => {
commit(types.RECEIVE_NAVIGATION_COUNT, { key: navigationItem.scope, count });
})
.catch((e) => logError(e)),
);
return Promise.all(promises);
};
export const fetchAllAggregation = ({ commit, state }) => {
commit(types.REQUEST_AGGREGATIONS);
return axios

View File

@ -84,3 +84,4 @@ export const SEARCH_LEVEL_PROJECT = 'project';
export const SEARCH_LEVEL_GROUP = 'group';
export const LS_REGEX_HANDLE = `${REGEX_PARAM}_advanced_search`;
export const SEARCH_WINDOW_TITLE = `${s__('GlobalSearch|Search')} · GitLab`;

View File

@ -33,7 +33,7 @@ export default {
state.frequentItems[key] = data;
},
[types.RECEIVE_NAVIGATION_COUNT](state, { key, count }) {
const item = { ...state.navigation[key], count, count_link: null };
const item = { ...state.navigation[key], count };
state.navigation = { ...state.navigation, [key]: item };
},
[types.REQUEST_AGGREGATIONS](state) {

View File

@ -14,6 +14,7 @@ import {
NUMBER_FORMATING_OPTIONS,
REGEX_PARAM,
LS_REGEX_HANDLE,
SEARCH_WINDOW_TITLE,
} from './constants';
function extractKeys(object, keyList) {
@ -114,7 +115,6 @@ export const mergeById = (inflatedData, storedData) => {
export const isSidebarDirty = (currentQuery, urlQuery) => {
return SIDEBAR_PARAMS.some((param) => {
// userAddParam ensures we don't get a false dirty from null !== undefined
const userAddedParam = !urlQuery[param] && currentQuery[param];
const userChangedExistingParam = urlQuery[param] && urlQuery[param] !== currentQuery[param];
@ -219,3 +219,22 @@ export const skipBlobESCount = (state, itemScope) =>
state.zoektAvailable &&
itemScope === SCOPE_BLOB
);
export const buildDocumentTitle = (title) => {
const prevTitle = document.title;
if (prevTitle.includes(SEARCH_WINDOW_TITLE)) {
if (prevTitle.startsWith(SEARCH_WINDOW_TITLE)) {
return `${title} · ${SEARCH_WINDOW_TITLE}`;
}
if (prevTitle.trim().startsWith(` · ${SEARCH_WINDOW_TITLE}`.trim())) {
return `${title} · ${SEARCH_WINDOW_TITLE}`;
}
const pattern = new RegExp(`^.*?(?= · ${SEARCH_WINDOW_TITLE})`);
return prevTitle.replace(pattern, title);
}
// If pattern not found, return the original
return title;
};

View File

@ -1,8 +1,9 @@
<script>
import { GlButton } from '@gitlab/ui';
import { isEmpty } from 'lodash';
import { isEmpty, debounce } from 'lodash';
// eslint-disable-next-line no-restricted-imports
import { mapState, mapActions, mapGetters } from 'vuex';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { InternalEvents } from '~/tracking';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { s__ } from '~/locale';
@ -51,6 +52,10 @@ export default {
return this.query ? this.query.search : '';
},
set(value) {
if (this.isMultiMatch) {
this.debouncedSetQuery({ key: 'search', value });
return;
}
this.setQuery({ key: 'search', value });
},
},
@ -86,6 +91,7 @@ export default {
created() {
this.preloadStoredFrequentItems();
this.regexEnabled = loadDataFromLS(LS_REGEX_HANDLE);
this.debouncedSetQuery = debounce(this.setQuery, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
},
methods: {
...mapActions(['applyQuery', 'setQuery', 'preloadStoredFrequentItems']),

View File

@ -1,11 +1,12 @@
<script>
import { GlButton, GlIcon, GlLoadingIcon, GlLink, GlTooltipDirective } from '@gitlab/ui';
import { GlButton, GlIcon, GlBadge, GlLoadingIcon, GlLink, GlTooltipDirective } from '@gitlab/ui';
import { __ } from '~/locale';
export default {
components: {
GlButton,
GlIcon,
GlBadge,
GlLoadingIcon,
GlLink,
},
@ -83,6 +84,11 @@ export default {
required: false,
default: null,
},
footerClass: {
type: [String, Object],
required: false,
default: null,
},
persistCollapsedState: {
type: Boolean,
required: false,
@ -208,14 +214,15 @@ export default {
>
<template v-if="displayedCount">
<gl-icon v-if="icon" :name="icon" variant="subtle" data-testid="crud-icon" />
{{ displayedCount }}
<template v-if="icon">{{ displayedCount }}</template>
<gl-badge v-else class="gl-self-baseline">{{ displayedCount }}</gl-badge>
</template>
<slot v-if="$scopedSlots.count" name="count"></slot>
</span>
</h2>
<p
v-if="description || $scopedSlots.description"
class="gl-mb-0 gl-mt-2 gl-text-sm gl-leading-normal gl-text-subtle"
class="!gl-mb-0 !gl-mt-2 !gl-text-sm !gl-leading-normal !gl-text-subtle"
data-testid="crud-description"
>
<slot v-if="$scopedSlots.description" name="description"></slot>
@ -283,8 +290,9 @@ export default {
</div>
<footer
v-if="$scopedSlots.footer"
v-if="isContentVisible && $scopedSlots.footer"
class="gl-border-t gl-rounded-b-base gl-border-section gl-bg-section gl-px-5 gl-py-4"
:class="footerClass"
data-testid="crud-footer"
>
<slot name="footer"></slot>

View File

@ -118,7 +118,7 @@
@apply gl-heading-1-fixed gl-mt-7 gl-pb-2 gl-border-b;
}
h2 {
h2:not(:where(.crud-header) h2) {
@apply gl-heading-2-fixed gl-mt-6 gl-pb-2 gl-border-b;
}
@ -203,13 +203,24 @@
.gl-table-shadow {
  overflow-x: auto;
  overflow-y: hidden;
  animation: gl-table-shadow $gl-easing-out-cubic;
  animation-timeline: scroll(self inline);
  // Needs to stay white because we use mix-blend-mode
  // to make it work in both light and dark mode.
  background-color: var(--gl-color-neutral-0);

  table:not(.code) {
    display: table;
    overflow-x: initial;
    mix-blend-mode: multiply;
    // `stretch` is not broadly available yet, so we have to rely
    // on the vendor prefixed ones below
    min-width: stretch;
    /* stylelint-disable-next-line value-no-vendor-prefix */
    min-width: -webkit-fill-available;
    /* stylelint-disable-next-line value-no-vendor-prefix */
    min-width: -moz-available;
  }
}

View File

@ -13,6 +13,8 @@ class Projects::BlobController < Projects::ApplicationController
include ProductAnalyticsTracking
extend ::Gitlab::Utils::Override
MAX_PREVIEW_CONTENT = 512.kilobytes
prepend_before_action :authenticate_user!, only: [:edit]
around_action :allow_gitaly_ref_name_caching, only: [:show]
@ -107,6 +109,11 @@ class Projects::BlobController < Projects::ApplicationController
def preview
@content = params[:content]
if @content.bytesize >= MAX_PREVIEW_CONTENT
return render json: { errors: ["Preview content too large"] }, status: :payload_too_large
end
blob.load_all_data!
diffy = Diffy::Diff.new(blob.data, @content, diff: '-U 3', include_diff_info: true)
diff_lines = diffy.diff.scan(/.*\n/)[2..]

View File

@ -55,6 +55,8 @@ class UsersFinder
users = by_custom_attributes(users)
users = by_non_internal(users)
users = by_without_project_bots(users)
users = by_membership(users)
users = by_member_source_ids(users)
order(users)
end
@ -180,6 +182,48 @@ class UsersFinder
users.without_project_bot
end
# Scopes +users+ to people sharing a membership with the current user:
# members of the current user's authorized groups (including their
# ancestor groups) or of their authorized projects.
# No-op unless params[:by_membership] is truthy.
def by_membership(users)
  return users unless params[:by_membership]

  # Access-request (pending) memberships are excluded via non_request.
  # The two member sets are combined with a SQL UNION inside a single
  # `users.id IN (...)` subquery.
  group_members = Member
    .non_request
    .with_source(current_user.authorized_groups.self_and_ancestors)
    .select(:user_id)
    .to_sql
  project_members = Member
    .non_request
    .with_source(current_user.authorized_projects)
    .select(:user_id)
    .to_sql
  query = "users.id IN (#{group_members} UNION #{project_members})"
  users.where(query) # rubocop: disable CodeReuse/ActiveRecord -- finder
end
# Scopes +users+ to members of the given member sources.
#
# params[:group_member_source_ids]   - Namespace IDs matching group memberships.
# params[:project_member_source_ids] - Project IDs matching project memberships.
#
# No-op when neither param is present. When both are present the two
# member relations are OR-ed together (and pending access requests
# excluded) before selecting the member user ids.
def by_member_source_ids(users)
  group_member_source_ids = params[:group_member_source_ids]
  project_member_source_ids = params[:project_member_source_ids]
  return users unless group_member_source_ids || project_member_source_ids

  member_queries = []
  if group_member_source_ids.present?
    member_queries << Member.with_source_id(group_member_source_ids).with_source_type('Namespace')
  end

  if project_member_source_ids.present?
    member_queries << Member.with_source_id(project_member_source_ids).with_source_type('Project')
  end

  return users if member_queries.empty?

  member_query = member_queries.reduce(:or).non_request
  users.id_in(member_query.select(:user_id))
end
def order(users)
return users unless params[:sort]

View File

@ -5,21 +5,30 @@ module Emails
def new_review_email(recipient_id, review_id)
setup_review_email(review_id, recipient_id)
mail_answer_thread(@merge_request, review_thread_options(recipient_id))
# NOTE: We must not send any internal notes to users who are not supposed to be able to see them.
# Also, we don't want to send an empty email if the review only contains internal notes.
unless @recipient.can?(:read_internal_note, @project)
@notes = @notes.reject(&:internal?)
return if @notes.blank?
end
mail_answer_thread(@merge_request, review_thread_options)
end
private
def review_thread_options(recipient_id)
def review_thread_options
{
from: sender(@author.id),
to: User.find(recipient_id).notification_email_for(@merge_request.target_project.group),
to: @recipient.notification_email_for(@merge_request.target_project.group),
subject: subject("#{@merge_request.title} (#{@merge_request.to_reference})")
}
end
def setup_review_email(review_id, recipient_id)
@review = Review.find_by_id(review_id)
@recipient = User.find(recipient_id)
@notes = @review.notes
@discussions = Discussion.build_discussions(@review.discussion_ids, preload_note_diff_file: true)
@include_diff_discussion_stylesheet = @discussions.values.any? do |discussion|

View File

@ -359,10 +359,15 @@ class NotifyPreview < ActionMailer::Preview
end
def new_review_email
review = Review.last
mr_author = review.merge_request.author
mr_author = merge_request.author
Notify.new_review_email(mr_author.id, review.id).message
cleanup do
review = Review.create!(project: project, merge_request: merge_request, author: mr_author)
Note.create!(review: review, project: project, noteable: merge_request, author: mr_author, note: 'Example note 1')
Note.create!(review: review, project: project, noteable: merge_request, author: mr_author, note: 'Example note 2')
Notify.new_review_email(mr_author.id, review.id).message
end
end
def project_was_moved_email

View File

@ -209,6 +209,8 @@ class Member < ApplicationRecord
end
scope :with_source_id, ->(source_id) { where(source_id: source_id) }
scope :with_source, ->(source) { where(source: source) }
scope :with_source_type, ->(source_type) { where(source_type: source_type) }
scope :including_source, -> { includes(:source) }
scope :including_user, -> { includes(:user) }

View File

@ -126,7 +126,7 @@ class ProjectImportState < ApplicationRecord
def mark_as_failed(error_message)
original_errors = errors.dup
sanitized_message = Gitlab::UrlSanitizer.sanitize(error_message)
sanitized_message = sanitized_failure_message(error_message)
fail_op
@ -185,6 +185,10 @@ class ProjectImportState < ApplicationRecord
def user_mapping_enabled?
user_mapping_enabled || project.import_data&.user_mapping_enabled?
end
# Strips embedded credentials from any URLs in the failure message so
# they are never persisted with the import state or surfaced to users.
def sanitized_failure_message(error_message)
  Gitlab::UrlSanitizer.sanitize(error_message)
end
end
ProjectImportState.prepend_mod_with('ProjectImportState')

View File

@ -160,7 +160,7 @@ class RemoteMirror < ApplicationRecord
end
def update_error_message(error_message)
self.last_error = Gitlab::UrlSanitizer.sanitize(error_message)
self.last_error = Gitlab::UrlSanitizer.sanitize(error_message, user: user, password: password)
end
def mark_for_retry!(error_message)

View File

@ -5,6 +5,7 @@ class BasePolicy < DeclarativePolicy::Base
with_options scope: :user, score: 0
condition(:admin) do
next false if @user&.from_ci_job_token?
next true if user_is_user? && @user.admin_bot?
if Gitlab::CurrentSettings.admin_mode
Gitlab::Auth::CurrentUserMode.new(@user).admin_mode?

View File

@ -408,6 +408,9 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
# ability to read, approve or reject member access requests of other users
enable :admin_member_access_request
enable :read_member_access_request
# ability to activate group members
enable :activate_group_member
end
rule { support_bot & has_project_with_service_desk_enabled }.policy do

View File

@ -0,0 +1,9 @@
---
name: users_search_scoped_to_authorized_namespaces_basic_search
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/442091
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182557
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/520710
milestone: '17.10'
group: group::global search
type: gitlab_com_derisk
default_enabled: false

View File

@ -0,0 +1,9 @@
---
name: users_search_scoped_to_authorized_namespaces_basic_search_by_ids
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/442091
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182557
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/524297
milestone: '17.10'
group: group::global search
type: gitlab_com_derisk
default_enabled: false

View File

@ -12,78 +12,78 @@ title: Credentials inventory for GitLab Self-Managed
{{< /details >}}
As a GitLab administrator, you are responsible for the overall security of your instance.
To assist, GitLab provides an inventory of all the credentials that can be used to access
your GitLab Self-Managed instance.
{{< history >}}
This page describes how to manage the credentials inventory for GitLab Self-Managed. To manage credentials on GitLab.com, see [Credentials inventory for GitLab.com](../user/group/credentials_inventory.md).
- Group access tokens [added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102959) in GitLab 15.6.
In the credentials inventory, you can view all:
{{< /history >}}
- Personal access tokens (PATs).
- Project access tokens
- Group access tokens ([introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102959) in GitLab 15.6).
- SSH keys.
- GPG keys.
{{< alert type="note" >}}
You can also [revoke](#revoke-a-users-personal-access-token), [delete](#delete-a-users-ssh-key), and view:
For GitLab.com, see [Credentials inventory for GitLab.com](../user/group/credentials_inventory.md).
- Who they belong to.
- Their access scope.
- Their usage pattern.
- When they:
- Expire.
- Were revoked.
{{< /alert >}}
## Revoke a user's personal access token
Use the credentials inventory to monitor and control access to your GitLab self-managed instance.
You can revoke a user's personal access token.
As an administrator, you can:
- Revoke personal or project access tokens.
- Delete SSH keys.
- Review credential details including:
- Ownership.
- Access scopes.
- Usage patterns.
- Expiration dates.
- Revocation dates.
## Revoke personal access tokens
To revoke a personal access token in your instance:
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **Credentials**.
1. By the personal access token, select **Revoke**.
1. Next to the personal access token, select **Revoke**.
If the token was previously expired or revoked, you'll see the date this happened instead.
If a **Revoke** button is not available, the token may already be expired or revoked, or may have an expiration date set.
| Token state | Revoke button displayed? | Comments |
|-------------|--------------------------|----------------------------------------------------------------------------|
| Active | Yes | Allows administrators to revoke the PAT, such as for a compromised account |
| Expired | No | Not applicable; token is already expired |
| Revoked | No | Not applicable; token is already revoked |
When a PAT is revoked from the credentials inventory, the instance notifies the user by email.
The access token is revoked and the user is notified by email.
![The credentials inventory page listing personal access tokens.](img/credentials_inventory_personal_access_tokens_v14_9.png)
## Revoke a user's project access token
## Revoke project access tokens
To revoke a project access token in your instance:
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **Credentials**.
1. Select the **Project access tokens** tab.
1. By the project access token, select **Revoke**.
1. Next to the project access token, select **Revoke**.
The project access token is revoked and a background worker is queued to delete the project bot user.
The access token is revoked and a background process begins to delete the associated project bot user.
![The credentials inventory page listing project access tokens.](img/credentials_inventory_project_access_tokens_v14_9.png)
## Delete a user's SSH key
## Delete SSH keys
To delete an SSH key in your instance:
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **Credentials**.
1. Select the **SSH Keys** tab.
1. By the SSH key, select **Delete**.
1. Next to the SSH key, select **Delete**.
The instance notifies the user.
The SSH key is deleted and the user is notified.
![The credentials inventory page listing SSH keys.](img/credentials_inventory_ssh_keys_v14_9.png)
## Review existing GPG keys
## View GPG keys
You can view all existing GPG in your GitLab instance by going to the
credentials inventory GPG Keys tab, as well as the following properties:
You can see details for each GPG key including the owner, ID, and [verification status](../user/project/repository/signed_commits/gpg.md).
- Who the GPG key belongs to.
- The ID of the GPG key.
- Whether the GPG key is [verified or unverified](../user/project/repository/signed_commits/gpg.md).
To view information about GPG keys in your instance:
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **Credentials**.
1. Select the **GPG Keys** tab.
![The credentials inventory page listing GPG keys.](img/credentials_inventory_gpg_keys_v14_9.png)

View File

@ -33160,6 +33160,15 @@ Represents a file or directory in the project repository that has been locked.
| <a id="pathlockid"></a>`id` | [`PathLockID!`](#pathlockid) | ID of the path lock. |
| <a id="pathlockpath"></a>`path` | [`String`](#string) | Locked path. |
| <a id="pathlockuser"></a>`user` | [`UserCore`](#usercore) | User that has locked this path. |
| <a id="pathlockuserpermissions"></a>`userPermissions` | [`PathLockPermissions!`](#pathlockpermissions) | Permissions for the current user on the resource. |
### `PathLockPermissions`
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="pathlockpermissionsdestroypathlock"></a>`destroyPathLock` | [`Boolean!`](#boolean) | If `true`, the user can perform `destroy_path_lock` on this resource. |
### `PendingGroupMember`
@ -36215,6 +36224,7 @@ Returns [`UserMergeRequestInteraction`](#usermergerequestinteraction).
| <a id="projectpermissionscreatemergerequestfrom"></a>`createMergeRequestFrom` | [`Boolean!`](#boolean) | If `true`, the user can perform `create_merge_request_from` on this resource. |
| <a id="projectpermissionscreatemergerequestin"></a>`createMergeRequestIn` | [`Boolean!`](#boolean) | If `true`, the user can perform `create_merge_request_in` on this resource. |
| <a id="projectpermissionscreatepages"></a>`createPages` | [`Boolean!`](#boolean) | If `true`, the user can perform `create_pages` on this resource. |
| <a id="projectpermissionscreatepathlock"></a>`createPathLock` | [`Boolean!`](#boolean) | If `true`, the user can perform `create_path_lock` on this resource. |
| <a id="projectpermissionscreatepipeline"></a>`createPipeline` | [`Boolean!`](#boolean) | If `true`, the user can perform `create_pipeline` on this resource. |
| <a id="projectpermissionscreatepipelineschedule"></a>`createPipelineSchedule` | [`Boolean!`](#boolean) | If `true`, the user can perform `create_pipeline_schedule` on this resource. |
| <a id="projectpermissionscreatesnippet"></a>`createSnippet` | [`Boolean!`](#boolean) | If `true`, the user can perform `create_snippet` on this resource. |
@ -36235,6 +36245,7 @@ Returns [`UserMergeRequestInteraction`](#usermergerequestinteraction).
| <a id="projectpermissionsreadenvironment"></a>`readEnvironment` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_environment` on this resource. |
| <a id="projectpermissionsreadmergerequest"></a>`readMergeRequest` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_merge_request` on this resource. |
| <a id="projectpermissionsreadpagescontent"></a>`readPagesContent` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_pages_content` on this resource. |
| <a id="projectpermissionsreadpathlocks"></a>`readPathLocks` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_path_locks` on this resource. |
| <a id="projectpermissionsreadproject"></a>`readProject` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_project` on this resource. |
| <a id="projectpermissionsreadprojectmember"></a>`readProjectMember` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_project_member` on this resource. |
| <a id="projectpermissionsreadwiki"></a>`readWiki` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_wiki` on this resource. |

View File

@ -505,7 +505,7 @@ Supported attributes:
|-------------------------------------|-------------------|----------|-------------|
| `id` | integer or string | Yes | The ID or [URL-encoded path of a project](rest/_index.md#namespaced-paths). |
| `approvals_required` | integer | Yes | The number of required approvals for this rule. |
| `name` | string | Yes | The name of the approval rule. |
| `name` | string | Yes | The name of the approval rule. Limited to 1024 characters. |
| `applies_to_all_protected_branches` | boolean | No | Whether to apply the rule to all protected branches. If set to `true`, ignores the value of `protected_branch_ids`. Default is `false`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/335316) in GitLab 15.3. |
| `group_ids` | Array | No | The IDs of groups as approvers. |
| `protected_branch_ids` | Array | No | The IDs of protected branches to scope the rule by. To identify the ID, [use the API](protected_branches.md#list-protected-branches). |
@ -649,7 +649,7 @@ Supported attributes:
| `id` | integer or string | Yes | The ID or [URL-encoded path of a project](rest/_index.md#namespaced-paths). |
| `approvals_required` | integer | Yes | The number of required approvals for this rule. |
| `approval_rule_id` | integer | Yes | The ID of an approval rule. |
| `name` | string | Yes | The name of the approval rule. |
| `name` | string | Yes | The name of the approval rule. Limited to 1024 characters. |
| `applies_to_all_protected_branches` | boolean | No | Whether to apply the rule to all protected branches. If set to `true`, it ignores the value of `protected_branch_ids`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/335316) in GitLab 15.3. |
| `group_ids` | Array | No | The IDs of groups as approvers. |
| `protected_branch_ids` | Array | No | The IDs of protected branches to scope the rule by. To identify the ID, [use the API](protected_branches.md#list-protected-branches). |
@ -1101,7 +1101,7 @@ Supported attributes:
| `id` | integer or string | Yes | The ID or [URL-encoded path of a project](rest/_index.md#namespaced-paths) |
| `approvals_required` | integer | Yes | The number of required approvals for this rule. |
| `merge_request_iid` | integer | Yes | The IID of the merge request. |
| `name` | string | Yes | The name of the approval rule. |
| `name` | string | Yes | The name of the approval rule. Limited to 1024 characters. |
| `approval_project_rule_id` | integer | No | The ID of a project's approval rule. |
| `group_ids` | Array | No | The IDs of groups as approvers. |
| `user_ids` | Array | No | The IDs of users as approvers. If you provide both `user_ids` and `usernames`, it adds both lists of users. |
@ -1193,7 +1193,7 @@ Supported attributes:
| `merge_request_iid` | integer | Yes | The IID of a merge request. |
| `approvals_required` | integer | No | The number of required approvals for this rule. |
| `group_ids` | Array | No | The IDs of groups as approvers. |
| `name` | string | No | The name of the approval rule. |
| `name` | string | No | The name of the approval rule. Limited to 1024 characters. |
| `remove_hidden_groups` | boolean | No | Whether to remove hidden groups. |
| `user_ids` | Array | No | The IDs of users as approvers. If you provide both `user_ids` and `usernames`, it adds both lists of users. |
| `usernames` | string array | No | The usernames of approvers for this rule (same as `user_ids` but requires a list of usernames). If you provide both `user_ids` and `usernames`, it adds both lists of users. |
@ -1389,7 +1389,7 @@ Supported attributes:
|----------------------|-------------------|----------|-------------|
| `id` | integer or string | Yes | The ID or [URL-encoded path of a group](rest/_index.md#namespaced-paths). |
| `approvals_required` | integer | Yes | The number of required approvals for this rule. |
| `name` | string | Yes | The name of the approval rule. |
| `name` | string | Yes | The name of the approval rule. Limited to 1024 characters. |
| `group_ids` | array | No | The IDs of groups as approvers. |
| `rule_type` | string | No | The rule type. `any_approver` is a pre-configured default rule with `approvals_required` at `0`. Other rules are `regular` (used for regular [merge request approval rules](../user/project/merge_requests/approvals/rules.md)) and `report_approver`. Don't use this field to build approval rules from the API. The `report_approver` field is used when GitLab creates an approval rule from configured and enabled [merge request approval policies](../user/application_security/policies/merge_request_approval_policies.md). |
| `user_ids` | array | No | The IDs of users as approvers. |
@ -1469,7 +1469,7 @@ Supported attributes:
| `id` | integer or string | Yes | The ID or [URL-encoded path of a group](rest/_index.md#namespaced-paths). |
| `approvals_required` | string | No | The number of required approvals for this rule. |
| `group_ids` | integer | No | The IDs of users as approvers. |
| `name` | string | No | The name of the approval rule. |
| `name` | string | No | The name of the approval rule. Limited to 1024 characters. |
| `rule_type` | array | No | The rule type. `any_approver` is a pre-configured default rule with `approvals_required` at `0`. Other rules are `regular` (used for regular [merge request approval rules](../user/project/merge_requests/approvals/rules.md)) and `report_approver`. Don't use this field to build approval rules from the API. The `report_approver` field is used when GitLab creates an approval rule from configured and enabled [merge request approval policies](../user/application_security/policies/merge_request_approval_policies.md). |
| `user_ids` | array | No | The IDs of groups as approvers. |

View File

@ -207,7 +207,7 @@ Documentation of model changes and deprecations is crucial for tracking impact a
### Feature Team Migration Template
Feature teams should use the following [template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/AI%20Model%20Rollout%20Plan.md?ref_type=heads) to implement model migrations. See an example from our [Claude 3.7 Sonnet Code Generation Rollout Plan](https://gitlab.com/gitlab-org/gitlab/-/issues/521044).
Feature teams should use the [AI Model Rollout template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/AI%20Model%20Rollout.md) to implement model migrations. See an example from our [Claude 3.7 Sonnet Code Generation Rollout Plan](https://gitlab.com/gitlab-org/gitlab/-/issues/521044).
### Anthropic Model Migration Tasks

View File

@ -29,7 +29,7 @@ To configure deployment of the PyPI package:
1. Create a token under `Account Settings > Add API Tokens`.
1. For the initial publish, select `Entire account (all projects)` scope. If the project already exists, scope the token to the specific project.
1. Configure credentials:
Locally:
```shell
@ -67,7 +67,31 @@ Deploying services to self-hosted environments poses challenges as services are
Self-hosted customers need to know which version of the service is compatible with their GitLab installation. Python services do not make use of [managed versioning](https://gitlab.com/gitlab-org/release/docs/-/tree/master/components/managed-versioning), so each service needs to handle its versioning and release cuts.
Per convention, once GitLab creates a new release, it can tag the service repo with a new tag named `self-hosted-<gitlab-version>`. An image with that tag is created, as [seen on AI Gateway](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/blob/main/.gitlab/ci/build.gitlab-ci.yml?ref_type=heads#L9). It's important that we have a version tag that matches GitLab versions, making it easier for users to deploy the full environment.
If a service is accessible through cloud-connector, it must adhere to [GitLab Statement Support](https://about.gitlab.com/support/statement-of-support/#version-support), providing stable deployments for the current and previous 2 majors releases of GitLab.
##### Tips
###### Create versions that match GitLab release
When supporting self-hosted deployment, it's important to have a version tag that matches GitLab versions, making it easier
for users to configure the different components of their environment. Add a pipeline to the GitLab release process
that tags the service repo with the same tag, which will then trigger a pipeline to create an image with the defined tag.
Example: [a pipeline on GitLab](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/aigw-tagging.gitlab-ci.yml) creates a tag on AI Gateway
that [releases a new image](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/blob/main/.gitlab/ci/build.gitlab-ci.yml?ref_type=heads#L24).
###### Multiple release deployments
Supporting 3 major versions can lead to a confusing codebase due to too many code paths. An alternative to keep support while
allowing code clean ups is to provide deployments for multiple versions of the service. For example, suppose GitLab is on
version `19.5`, this would need three deployments of the service:
- One for service version `17.11`, which provides support for all GitLab `17.x` versions
- One for service version `18.11`, which provides support for all GitLab `18.x` versions
- One for service version `19.5`, which provides support for GitLab versions `19.0`-`19.5`.
Once version 18.0 is released, unused code from versions 17.x can be safely removed, since a legacy deployment will be present.
Then, once version 20.0 is released, and GitLab version 17.x is not supported anymore, the legacy deployment can also be removed.
#### Publishing images

View File

@ -49,7 +49,7 @@ Workflow:
## Prerequisites
Before you can use Duo Workflow, you must:
Before you can use Workflow, you must:
- [Install Visual Studio Code](https://code.visualstudio.com/download) (VS Code).
- [Set up the GitLab Workflow extension for VS Code](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#setup). Minimum version 5.16.0.
@ -63,7 +63,7 @@ Before you can use Duo Workflow, you must:
- The repository you want to work with should be small or medium-sized.
Workflow can be slow or fail for large repositories.
To isolate GitLab Duo Workflow in a Docker container, you must complete the [Docker setup](docker_set_up.md). This is not the preferred method to run Duo Workflow.
To isolate GitLab Duo Workflow in a Docker container, you must complete the [Docker setup](docker_set_up.md). This is not the preferred method to run Workflow.
## Use Workflow in VS Code

View File

@ -2,7 +2,7 @@
stage: AI-powered
group: Duo Workflow
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
title: Set up Docker for GitLab Duo Workflow
title: Set up Docker for GitLab Duo Workflow (optional)
---
{{< details >}}
@ -22,6 +22,8 @@ This feature is considered [experimental](../../policy/development_stages_suppor
Use the following guide to set up GitLab Duo Workflow with Docker.
This is not the preferred method to run Workflow.
If you have VS Code and at least version 5.16.0 of the GitLab Workflow extension for VS Code,
you can use Workflow. For more information, see [the prerequisites](_index.md#prerequisites).
## Install Docker and set the socket file path

View File

@ -91,6 +91,7 @@ Values can include:
{{< history >}}
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/508956) in GitLab 17.7: Configuring the presentation layer using YAML front matter is deprecated.
- Parameters `title` and `description` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/183709) in GitLab 17.10.
{{< /history >}}
@ -113,7 +114,7 @@ Views can be embedded in the following areas:
The syntax of views is a superset of YAML that consists of:
- The `query` parameter: Expressions joined together with a logical operator, such as `and`.
- Parameters related to the presentation layer, like `display`, `limit`, or `fields`.
- Parameters related to the presentation layer, like `display`, `limit`, or `fields`, `title`, and `description`.
A GLQL view is defined in Markdown as a code block, similar to other code blocks like Mermaid.
@ -125,6 +126,8 @@ For example:
````yaml
```glql
display: table
title: GLQL table 🎉
description: This view lists my open issues
fields: title, state, health, epic, milestone, weight, updated
limit: 5
query: project = "gitlab-org/gitlab" AND assignee = currentUser() AND state = opened
@ -133,20 +136,22 @@ query: project = "gitlab-org/gitlab" AND assignee = currentUser() AND state = op
This query should render a table like the one below:
![A table listing issues assigned to the current user](img/glql_table_v17_8.png)
![A table listing issues assigned to the current user](img/glql_table_v17_10.png)
#### Presentation syntax
Aside from the `query` parameter, you can configure presentation details for your GLQL query using some
more parameters.
more optional parameters.
Supported parameters:
| Parameter | Default | Description |
| --------- | ------- | ----------- |
| `display` | `table` | How to display the data. Supported options: `table`, `list`, or `orderedList`. |
| `limit` | `100` | How many items to display. The maximum value is `100`. |
| `fields` | `title` | A comma-separated list of [fields](fields.md). All fields allowed in columns of a GLQL view are supported. |
| Parameter | Default | Description |
| ------------- | --------------------------- | ----------- |
| `description` | None | An optional description to display below the title. |
| `display` | `table` | How to display the data. Supported options: `table`, `list`, or `orderedList`. |
| `fields` | `title` | A comma-separated list of [fields](fields.md). All fields allowed in columns of a GLQL view are supported. |
| `limit` | `100` | How many items to display. The maximum value is `100`. |
| `title` | `GLQL table` or `GLQL list` | A title displayed at the top of the GLQL view. |
For example, to display first five issues assigned to current user in the `gitlab-org/gitlab`
project as a list, displaying fields `title`, `health`, and `due`:

Binary file not shown.

After

Width:  |  Height:  |  Size: 62 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 27 KiB

View File

@ -18,61 +18,52 @@ title: Credentials inventory for GitLab.com
{{< /history >}}
As a GitLab.com top-level group owner, you are responsible for the overall security of your groups and projects.
To assist, GitLab provides an inventory of all the credentials that can be used to access your groups and projects.
{{< alert type="note" >}}
This page describes how to manage the credentials inventory for GitLab.com. To manage credentials on GitLab Self-Managed, see [Credentials inventory for GitLab Self-Managed](../../administration/credentials_inventory.md).
For GitLab Self-Managed, see [Credentials inventory for GitLab Self-Managed](../../administration/credentials_inventory.md).
In the credentials inventory, you can view:
{{< /alert >}}
- For [enterprise users](../enterprise_user/_index.md):
- Personal access tokens (PATs).
- SSH keys.
Use the credentials inventory to monitor and control access to your groups and projects for GitLab.com.
You can also:
As the Owner for a top-level group, you can:
- [Revoke a personal access token](#revoke-a-users-personal-access-token).
- [Delete a user's SSH key](#delete-a-users-ssh-key).
- View the following information about access tokens:
- Who they belong to.
- Their access scope.
- Their usage pattern.
- When they:
- Expire.
- Were revoked.
- Revoke personal access tokens.
- Delete SSH keys.
- Review credential details for your [enterprise users](../enterprise_user/_index.md) including:
- Ownership.
- Access scopes.
- Usage patterns.
- Expiration dates.
- Revocation dates.
## Revoke a user's personal access token
## Revoke personal access tokens
You can revoke an enterprise user's personal access token.
To revoke personal access tokens for enterprise users in your group:
1. On the left sidebar, select **Secure**.
1. Select **Credentials**.
1. By the personal access token, select **Revoke**.
1. Next to the personal access token, select **Revoke**.
If the token was previously expired or revoked, you'll see the date this happened instead.
If a **Revoke** option is not available, the token might already be revoked or have expired, or have an expiration date set.
| Token state | **Revoke** option displayed? | Comments |
|-------------|------------------------------|----------------------------------------------------------------------------|
| Active | Yes | Allows administrators to revoke the PAT, such as for a compromised account |
| Expired | No | Not applicable; token is already expired |
| Revoked | No | Not applicable; token is already revoked |
When a PAT is revoked from the credentials inventory, the instance notifies the user by email.
The access token is revoked and the user is notified by email.
![The credentials inventory page listing personal access tokens.](img/group_credentials_inventory_personal_access_tokens_v17_5.png)
## Delete a user's SSH key
## Delete SSH keys
To delete SSH keys for enterprise users in your group:
1. On the left sidebar, select **Secure**.
1. Select **Credentials**.
1. Select the **SSH Keys** tab.
1. By the SSH key, select **Delete**.
1. Next to the SSH key, select **Delete**.
The instance notifies the user.
The SSH key is deleted and the user is notified.
![The credentials inventory page listing SSH keys.](img/group_credentials_inventory_ssh_keys_v17_5.png)
## Revoke a project or group access token
## Revoke project or group access tokens
Credentials inventory on GitLab.com does not support viewing or revoking project or group access tokens.
You cannot view or revoke project or group access tokens using the credentials inventory on GitLab.com.
[Issue 498333](https://gitlab.com/gitlab-org/gitlab/-/issues/498333) proposes to add this feature.

View File

@ -266,7 +266,7 @@ If you must unverify both future and past commits,
- [Managing OpenPGP Keys](https://riseup.net/en/security/message-security/openpgp/gpg-keys)
- [OpenPGP Best Practices](https://riseup.net/en/security/message-security/openpgp/best-practices)
- [Creating a new GPG key with subkeys](https://www.void.gr/kargig/blog/2013/12/02/creating-a-new-gpg-key-with-subkeys/) (advanced)
- [Review existing GPG keys in your instance](../../../../administration/credentials_inventory.md#review-existing-gpg-keys)
- [View GPG keys in your instance](../../../../administration/credentials_inventory.md#view-gpg-keys)
## Troubleshooting

View File

@ -356,7 +356,7 @@ To use SSH with GitLab, copy your public key to your GitLab account:
1. Optional. Select the **Usage type** of the key. It can be used either for `Authentication` or `Signing` or both. `Authentication & Signing` is the default value.
1. Optional. Update **Expiration date** to modify the default expiration date.
- Administrators can view expiration dates and use them for
guidance when [deleting keys](../administration/credentials_inventory.md#delete-a-users-ssh-key).
guidance when [deleting keys](../administration/credentials_inventory.md#delete-ssh-keys).
- GitLab checks all SSH keys at 01:00 AM UTC every day. It emails an expiration notice for all SSH keys that are scheduled to expire seven days from now.
- GitLab checks all SSH keys at 02:00 AM UTC every day. It emails an expiration notice for all SSH keys that expire on the current date.
1. Select **Add key**.

View File

@ -40,7 +40,7 @@ module Banzai
)
# Text matching LINK_OR_IMAGE_PATTERN inside these elements will not be linked
IGNORE_PARENTS = %w[a code kbd pre script style].to_set
IGNORE_PARENTS = %w[a code kbd pre script style span[@data-math-style]].to_set
# The XPath query to use for finding text nodes to parse.
TEXT_QUERY = %(descendant-or-self::text()[

View File

@ -20,15 +20,6 @@ module Gitlab
nodes
end
# Whether redaction should run for @type: true when the type exposes
# :authorized? and either declares no authorization rules (nil) or
# declares at least one.
def active?
  if @type.respond_to?(:authorized?)
    rules = @type.try(:authorization)
    rules.nil? || rules.any?
  else
    # some scalar types (such as integers) do not respond to :authorized?
    false
  end
end
private
def perform_before_authorize_action(nodes)
@ -62,16 +53,31 @@ module Gitlab
value
end
def redact_connection(conn, context)
redactor = Redactor.new(@field.type.unwrap.node_type, context, @field.resolver)
return unless redactor.active?
private
# Attaches a Redactor to a paginated connection so unauthorized nodes are
# filtered out, but only when the node type actually carries authorization.
def redact_connection(conn, context)
  node_type = @field.type.unwrap.node_type
  return unless has_authorization?(node_type)

  redactor = Redactor.new(node_type, context, @field.resolver)
  # Some connection classes do not support redaction; skip them silently.
  conn.redactor = redactor if conn.respond_to?(:redactor=)
end
def redact_list(list, context)
redactor = Redactor.new(@field.type.unwrap, context, @field.resolver)
redactor.redact(list) if redactor.active?
type = @field.type.unwrap
return unless has_authorization?(type)
Redactor
.new(type, context, @field.resolver)
.redact(list)
end
# Returns true when +type+ participates in authorization: it must expose
# :authorized?, and it either declares no authorization rules (nil) or
# declares at least one rule.
def has_authorization?(type)
  if type.respond_to?(:authorized?)
    rules = type.try(:authorization)
    rules.nil? || rules.any?
  else
    # some scalar types (such as integers) do not respond to :authorized?
    false
  end
end
def set_skip_type_authorization(context)

View File

@ -109,7 +109,18 @@ module Gitlab
def users
return User.none unless Ability.allowed?(current_user, :read_users_list)
UsersFinder.new(current_user, { search: query, use_minimum_char_limit: false }).execute
params = { search: query, use_minimum_char_limit: false }
if current_user && filters[:autocomplete]
if Feature.enabled?(:users_search_scoped_to_authorized_namespaces_basic_search, current_user)
params[:by_membership] = true
elsif Feature.enabled?(:users_search_scoped_to_authorized_namespaces_basic_search_by_ids, current_user)
params[:group_member_source_ids] = current_user_authorized_group_ids
params[:project_member_source_ids] = current_user_authorized_project_ids
end
end
UsersFinder.new(current_user, params).execute
end
# highlighting is only performed by Elasticsearch backed results
@ -266,6 +277,20 @@ module Gitlab
end
end
# IDs of the groups the current user is a member of (all_available: false
# restricts the finder to authorized groups rather than all public ones).
def current_user_authorized_group_ids
  authorized_groups = GroupsFinder.new(current_user, { all_available: false }).execute
  authorized_groups.pluck("#{Group.table_name}.#{Group.primary_key}") # rubocop: disable CodeReuse/ActiveRecord -- need to find ids
end
# IDs of the non-public projects the current user is authorized to see.
def current_user_authorized_project_ids
  authorized_projects = ProjectsFinder
    .new(current_user: current_user, params: { non_public: true })
    .execute

  authorized_projects.pluck_primary_key
end
# rubocop: disable CodeReuse/ActiveRecord
def limited_count(relation)
relation.reorder(nil).limit(count_limit).size

View File

@ -4,16 +4,11 @@ module Gitlab
class UrlSanitizer
include Gitlab::Utils::StrongMemoize
MASK = '*****'
ALLOWED_SCHEMES = %w[http https ssh git].freeze
ALLOWED_WEB_SCHEMES = %w[http https].freeze
SCHEMIFIED_SCHEME = 'glschemelessuri'
SCHEMIFY_PLACEHOLDER = "#{SCHEMIFIED_SCHEME}://".freeze
# SCP style URLs have a format of [userinfo]@[host]:[path] with them not containing
# port arguments as that is passed along with a -P argument
SCP_REGEX = %r{
#{URI::REGEXP::PATTERN::USERINFO}@#{URI::REGEXP::PATTERN::HOST}:
(?!\b\d+\b) # use word boundaries to ensure no standalone digits after the colon
}x
# URI::DEFAULT_PARSER.make_regexp will only match URLs with schemes or
# relative URLs. This section will match schemeless URIs with userinfo
# e.g. user:pass@gitlab.com but will not match scp-style URIs e.g.
@ -23,14 +18,12 @@ module Gitlab
# also match non-escaped userinfo e.g foo:b?r@gitlab.com which should be
# encoded as foo:b%3Fr@gitlab.com
URI_REGEXP = %r{
(?:
#{URI::DEFAULT_PARSER.make_regexp(ALLOWED_SCHEMES)}
|
(?# negative lookahead before the schemeless matcher ensures this isn't an SCP-style URL)
(?!#{SCP_REGEX})
(?:(?:(?!@)[%#{URI::REGEXP::PATTERN::UNRESERVED}#{URI::REGEXP::PATTERN::RESERVED}])+(?:@))
#{URI::REGEXP::PATTERN::HOSTPORT}
)
(?# negative lookahead for masked userinfo *****, *****:, *****:*****, or :*****)
(?!.*?(\*{5}$|\*{5}:$|\*{5}:\*{5}|:\*{5}))
#{URI::REGEXP::PATTERN::USERINFO}@
(?# negative lookahead to ensure this isn't an SCP-style URL)
(?!#{URI::REGEXP::PATTERN::HOST}:(?!\b\d+\b))
#{URI::REGEXP::PATTERN::HOSTPORT}
}x
# This expression is derived from `URI::REGEXP::PATTERN::USERINFO` but with the
# addition of `{` and `}` in the list of allowed characters to account for the
@ -39,7 +32,8 @@ module Gitlab
# http://myuser:{masked_password}@{masked_domain}.com/{masked_hook}
MASKED_USERINFO_REGEX = %r{(?:[\\-_.!~*'()a-zA-Z\d;:&=+$,{}]|%[a-fA-F\d]{2})*}
def self.sanitize(content)
def self.sanitize(content, user: nil, password: nil)
content = sanitize_unencoded(content, user: user, password: password)
content.gsub(URI_REGEXP) do |url|
new(url).masked_url
rescue Addressable::URI::InvalidURIError
@ -47,6 +41,16 @@ module Gitlab
end
end
# Masks occurrences of a known, unencoded "user:password@" credential pair
# anywhere in +content+. Called by .sanitize before the URI_REGEXP-based
# masking runs, for callers that already know the raw credentials.
def self.sanitize_unencoded(content, user: nil, password: nil)
# Nothing to mask when neither credential was supplied.
return content unless user.present? || password.present?
# When the password is blank the ":" separator becomes optional, so both
# "user@" and "user:@" forms are matched. Credentials are Regexp.escape'd
# since they may contain regex metacharacters.
unencoded_basic_auth_regex =
%r{#{Regexp.escape(user.to_s)}:#{'?' if password.blank?}#{Regexp.escape(password.to_s)}@}
# Replacement mirrors which parts were present: "*****@", ":*****@",
# or "*****:*****@".
masked_basic_auth = "#{MASK if user.present?}#{%(:#{MASK}) if password.present?}@"
content.gsub(unencoded_basic_auth_regex, masked_basic_auth)
end
private_class_method :sanitize_unencoded
def self.valid?(url, allowed_schemes: ALLOWED_SCHEMES)
return false unless url.present?
return false unless url.is_a?(String)
@ -97,8 +101,8 @@ module Gitlab
def masked_url
url = @url.dup
url.password = "*****" if url.password.present?
url.user = "*****" if url.user.present?
url.password = MASK if url.password.present?
url.user = MASK if url.user.present?
reverse_schemify(url.to_s)
end
strong_memoize_attr :masked_url

View File

@ -16881,6 +16881,9 @@ msgstr ""
msgid "Copy file path"
msgstr ""
msgid "Copy file permalink"
msgstr ""
msgid "Copy image URL"
msgstr ""
@ -25888,6 +25891,12 @@ msgstr ""
msgid "GCP region configured"
msgstr ""
msgid "GLQL list"
msgstr ""
msgid "GLQL table"
msgstr ""
msgid "GLQL view timed out. Add more filters to reduce the number of results."
msgstr ""
@ -43839,6 +43848,9 @@ msgstr ""
msgid "Please enter a name for the custom emoji."
msgstr ""
msgid "Please enter a name with less than %{number} characters."
msgstr ""
msgid "Please enter a non-negative number"
msgstr ""
@ -58532,6 +58544,9 @@ msgstr ""
msgid "The base URL to the Harbor instance linked to the GitLab project. For example, `https://demo.goharbor.io`."
msgstr ""
msgid "The blob is too large to render"
msgstr ""
msgid "The branch for this project has no active pipeline configuration."
msgstr ""

View File

@ -7,7 +7,7 @@ RSpec.describe 'processing of SAMLResponse in dependencies' do
let(:mock_saml_response) { File.read('spec/fixtures/authentication/saml_response.xml') }
let(:saml_strategy) { OmniAuth::Strategies::SAML.new({}) }
let(:session_mock) { {} }
let(:settings) { double('settings', { soft: false, idp_cert_fingerprint: 'something' }) }
let(:settings) { double('settings', { soft: false, idp_cert_fingerprint: 'something', check_malformed_doc: true }) }
let(:auth_hash) { Gitlab::Auth::Saml::AuthHash.new(saml_strategy) }
subject { auth_hash.authn_context }

View File

@ -7,7 +7,13 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
include ListboxHelpers
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:project) { create(:project, :repository, namespace: user.namespace) }
let_it_be_with_reload(:project) do
# This helps with some of the test flakiness.
project = create(:project, :repository, namespace: user.namespace)
project.repository.root_ref
project.repository.ls_files('master')
project
end
context 'when signed in' do
before do

View File

@ -149,6 +149,77 @@ RSpec.describe UsersFinder do
users = described_class.new(user, admins: true).execute
expect(users).to contain_exactly(user, normal_user, external_user, admin_user, unconfirmed_user, omniauth_user, internal_user, project_bot, service_account_user)
end
context 'when filtering by_membership' do
let_it_be(:group_user) { create(:user) }
let_it_be(:project_user) { create(:user) }
let_it_be(:group) { create(:group, developers: [user]) }
let_it_be(:project) { create(:project, developers: [user]) }
subject(:users) { described_class.new(user, by_membership: true).execute }
it 'includes the user and project owner' do
expect(users).to contain_exactly(user, project.owner)
end
it 'includes users who are members of the user groups' do
group.add_developer(group_user)
expect(users).to contain_exactly(user, project.owner, group_user)
end
it 'includes users who are members of the user projects' do
project.add_developer(project_user)
expect(users).to contain_exactly(user, project.owner, project_user)
end
end
context 'when filtering by_member_source_ids' do
let_it_be(:group_user) { create(:user) }
let_it_be(:project_user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
it 'filters by group membership' do
group.add_developer(group_user)
users = described_class.new(user, group_member_source_ids: [group.id]).execute
expect(users).to contain_exactly(group_user)
end
it 'filters by project membership' do
project.add_developer(project_user)
users = described_class.new(user, project_member_source_ids: [project.id]).execute
expect(users).to contain_exactly(project_user, project.owner)
end
it 'filters by group and project membership' do
group.add_developer(group_user)
project.add_developer(project_user)
users = described_class
.new(user, group_member_source_ids: [group.id], project_member_source_ids: [project.id])
.execute
expect(users).to contain_exactly(group_user, project_user, project.owner)
end
it 'does not include members not part of the filtered group' do
users = described_class.new(user, group_member_source_ids: [group.id]).execute
expect(users).not_to include(group_user)
end
it 'does not include members not part of the filtered project' do
users = described_class.new(user, project_member_source_ids: [project.id]).execute
expect(users).not_to include(project_user)
end
end
end
shared_examples 'executes users finder as admin' do

View File

@ -0,0 +1,56 @@
import ShortcutsBlob from '~/behaviors/shortcuts/shortcuts_blob';
import { PROJECT_FILES_GO_TO_PERMALINK } from '~/behaviors/shortcuts/keybindings';
import { moveToFilePermalink } from '~/blob/utils';
describe('ShortcutsBlob', () => {
const shortcuts = {
add: jest.fn(),
};
const init = () => {
return new ShortcutsBlob(shortcuts);
};
beforeEach(() => {
shortcuts.add.mockClear();
window.gon = {};
});
describe('constructor', () => {
describe('when shortcuts should be added', () => {
it('adds the permalink shortcut when gon.features is undefined', () => {
init();
expect(shortcuts.add).toHaveBeenCalledWith(
PROJECT_FILES_GO_TO_PERMALINK,
moveToFilePermalink,
);
});
it('adds shortcuts when blobOverflowMenu is false', () => {
window.gon.features = {
blobOverflowMenu: false,
};
init();
expect(shortcuts.add).toHaveBeenCalledWith(
PROJECT_FILES_GO_TO_PERMALINK,
moveToFilePermalink,
);
});
});
describe('when shortcuts should not be added', () => {
it('does not add shortcuts when blobOverflowMenu is true', () => {
window.gon.features = {
blobOverflowMenu: true,
};
init();
expect(shortcuts.add).not.toHaveBeenCalled();
});
});
});
});

View File

@ -12,7 +12,11 @@ import { ToolbarExtension } from '~/editor/extensions/source_editor_toolbar_ext'
import SourceEditor from '~/editor/source_editor';
import axios from '~/lib/utils/axios_utils';
import { TEST_HOST } from 'helpers/test_constants';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import {
HTTP_STATUS_INTERNAL_SERVER_ERROR,
HTTP_STATUS_OK,
HTTP_STATUS_PAYLOAD_TOO_LARGE,
} from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
import { createAlert } from '~/alert';
import Api from '~/api';
@ -264,4 +268,36 @@ describe('Blob Editing', () => {
);
});
});
describe('handles error during preview', () => {
const endpoint = `${TEST_HOST}/preview`;
const setupSpec = async () => {
await initEditor();
const findPreviewLink = () => document.querySelector('a[href="#preview"]');
findPreviewLink().dataset.previewUrl = endpoint;
findPreviewLink().click();
await waitForPromises();
};
it('creates an alert for file size limit exceeded', async () => {
mock.onPost(endpoint).reply(HTTP_STATUS_PAYLOAD_TOO_LARGE);
await setupSpec();
expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: 'The blob is too large to render',
}),
);
});
it('creates a generic alert for other errors', async () => {
mock.onPost(endpoint).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
await setupSpec();
expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred previewing the blob',
});
});
});
});

View File

@ -51,9 +51,9 @@ describe('ListPresenter', () => {
expect(htmlPresenter1.props('data')).toBe(MOCK_ISSUES.nodes[0].description);
expect(htmlPresenter2.props('data')).toBe(MOCK_ISSUES.nodes[1].description);
expect(listItem1.text()).toEqual('Issue 1 (#1) · @foobar · Open · This is a description');
expect(listItem1.text()).toEqual('Issue 1 (#1) @foobar · Open · This is a description');
expect(listItem2.text()).toEqual(
'Issue 2 (#2 - closed) · @janedoe · Closed · This is another description',
'Issue 2 (#2 - closed) @janedoe · Closed · This is another description',
);
});

View File

@ -220,7 +220,7 @@ export const MOCK_DATA_FOR_NAVIGATION_ACTION_MUTATION = {
label: 'Projects',
scope: 'projects',
link: '/search?scope=projects&search=et',
count_link: null,
count_link: '/search/count?scope=projects&search=et',
},
};

View File

@ -1,6 +1,9 @@
import MockAdapter from 'axios-mock-adapter';
import { mapValues } from 'lodash';
// rspec spec/frontend/fixtures/search_navigation.rb to generate this file
import noActiveItems from 'test_fixtures/search_navigation/no_active_items.json';
import testAction from 'helpers/vuex_action_helper';
import { setUrlParams, updateHistory } from '~/lib/utils/url_utility';
import Api from '~/api';
import { createAlert } from '~/alert';
import * as logger from '~/lib/logger';
@ -45,6 +48,17 @@ jest.mock('~/lib/logger', () => ({
logError: jest.fn(),
}));
jest.mock('~/lib/utils/url_utility', () => {
const urlUtility = jest.requireActual('~/lib/utils/url_utility');
return {
__esModule: true,
...urlUtility,
setUrlParams: jest.fn(() => 'mocked-new-url'),
updateHistory: jest.fn(),
};
});
describe('Global Search Store Actions', () => {
let mock;
let state;
@ -159,41 +173,112 @@ describe('Global Search Store Actions', () => {
});
});
describe.each`
payload | isDirty | isDirtyMutation
${{ key: SIDEBAR_PARAMS[0], value: 'test' }} | ${false} | ${[{ type: types.SET_SIDEBAR_DIRTY, payload: false }]}
${{ key: SIDEBAR_PARAMS[0], value: 'test' }} | ${true} | ${[{ type: types.SET_SIDEBAR_DIRTY, payload: true }]}
${{ key: SIDEBAR_PARAMS[1], value: 'test' }} | ${false} | ${[{ type: types.SET_SIDEBAR_DIRTY, payload: false }]}
${{ key: SIDEBAR_PARAMS[1], value: 'test' }} | ${true} | ${[{ type: types.SET_SIDEBAR_DIRTY, payload: true }]}
${{ key: 'non-sidebar', value: 'test' }} | ${false} | ${[]}
${{ key: 'non-sidebar', value: 'test' }} | ${true} | ${[]}
`('setQuery', ({ payload, isDirty, isDirtyMutation }) => {
describe(`when filter param is ${payload.key} and utils.isSidebarDirty returns ${isDirty}`, () => {
const expectedMutations = [{ type: types.SET_QUERY, payload }].concat(isDirtyMutation);
describe('setQuery', () => {
describe('when search type is zoekt and scope is blob with zoektMultimatchFrontend feature enabled', () => {
const payload = { key: 'some-key', value: 'some-value' };
let originalGon;
let commit;
let fetchSidebarCountSpy;
beforeEach(() => {
storeUtils.isSidebarDirty = jest.fn().mockReturnValue(isDirty);
originalGon = window.gon;
commit = jest.fn();
fetchSidebarCountSpy = jest
.spyOn(actions, 'fetchSidebarCount')
.mockImplementation(() => Promise.resolve());
window.gon = { features: { zoektMultimatchFrontend: true } };
storeUtils.isSidebarDirty = jest.fn().mockReturnValue(false);
state = createState({
query: { ...MOCK_QUERY, search: 'test-search' },
navigation: { ...MOCK_NAVIGATION },
searchType: 'zoekt',
});
});
it(`should dispatch the correct mutations`, () => {
return testAction({ action: actions.setQuery, payload, state, expectedMutations });
afterEach(() => {
window.gon = originalGon;
fetchSidebarCountSpy.mockRestore();
});
it('should update URL, document title, and history', async () => {
const getters = { currentScope: 'blobs' };
await actions.setQuery({ state, commit, getters }, payload);
expect(setUrlParams).toHaveBeenCalledWith(
{ ...state.query },
window.location.href,
false,
true,
);
expect(document.title).toBe(state.query.search);
expect(updateHistory).toHaveBeenCalledWith({
state: state.query,
title: state.query.search,
url: 'mocked-new-url',
replace: false,
});
});
it('does not update URL or fetch sidebar counts when conditions are not met', async () => {
let getters = { currentScope: 'blobs' };
state.searchType = 'not-zoekt';
await actions.setQuery({ state, commit, getters }, payload);
expect(setUrlParams).not.toHaveBeenCalled();
expect(updateHistory).not.toHaveBeenCalled();
expect(fetchSidebarCountSpy).not.toHaveBeenCalled();
setUrlParams.mockClear();
updateHistory.mockClear();
fetchSidebarCountSpy.mockClear();
state.searchType = 'zoekt';
getters = { currentScope: 'not-blobs' };
await actions.setQuery({ state, commit, getters }, payload);
expect(setUrlParams).not.toHaveBeenCalled();
expect(updateHistory).not.toHaveBeenCalled();
expect(fetchSidebarCountSpy).not.toHaveBeenCalled();
setUrlParams.mockClear();
updateHistory.mockClear();
fetchSidebarCountSpy.mockClear();
getters = { currentScope: 'blobs' };
window.gon.features.zoektMultimatchFrontend = false;
await actions.setQuery({ state, commit, getters }, payload);
expect(setUrlParams).not.toHaveBeenCalled();
expect(updateHistory).not.toHaveBeenCalled();
expect(fetchSidebarCountSpy).not.toHaveBeenCalled();
});
});
});
describe.each`
payload
${{ key: REGEX_PARAM, value: true }}
${{ key: REGEX_PARAM, value: { random: 'test' } }}
`('setQuery', ({ payload }) => {
describe(`when query param is ${payload.key}`, () => {
beforeEach(() => {
storeUtils.setDataToLS = jest.fn();
actions.setQuery({ state, commit: jest.fn() }, payload);
});
describe.each`
payload
${{ key: REGEX_PARAM, value: true }}
${{ key: REGEX_PARAM, value: { random: 'test' } }}
`('setQuery with REGEX_PARAM', ({ payload }) => {
describe(`when query param is ${payload.key}`, () => {
beforeEach(() => {
storeUtils.setDataToLS = jest.fn();
window.gon = { features: { zoektMultimatchFrontend: false } };
const getters = { currentScope: 'not-blobs' };
actions.setQuery({ state, commit: jest.fn(), getters }, payload);
});
it(`setsItem in local storage`, () => {
expect(storeUtils.setDataToLS).toHaveBeenCalledWith(LS_REGEX_HANDLE, expect.anything());
it(`setsItem in local storage`, () => {
expect(storeUtils.setDataToLS).toHaveBeenCalledWith(LS_REGEX_HANDLE, expect.anything());
});
});
});
});
@ -201,7 +286,12 @@ describe('Global Search Store Actions', () => {
describe('applyQuery', () => {
beforeEach(() => {
setWindowLocation('https://test/');
jest.spyOn(urlUtils, 'visitUrl').mockReturnValue({});
jest.spyOn(urlUtils, 'visitUrl').mockImplementation(() => {});
jest
.spyOn(urlUtils, 'setUrlParams')
.mockReturnValue(
'https://test/?scope=issues&state=all&group_id=1&language%5B%5D=C&language%5B%5D=JavaScript&label_name%5B%5D=Aftersync&label_name%5B%5D=Brist&search=*',
);
});
it('calls visitUrl and setParams with the state.query', async () => {
@ -355,17 +445,29 @@ describe('Global Search Store Actions', () => {
});
});
describe('fetchSidebarCount uses wild card seach', () => {
describe('fetchSidebarCount uses wild card search', () => {
beforeEach(() => {
state.navigation = MOCK_NAVIGATION;
state.urlQuery.search = '';
state.navigation = noActiveItems;
state.query = { search: '' };
state.urlQuery = { search: '' };
jest.spyOn(urlUtils, 'setUrlParams').mockImplementation((params) => {
return `http://test.host/search/count?search=${params.search || '*'}`;
});
storeUtils.skipBlobESCount = jest.fn().mockReturnValue(true);
mock.onGet().reply(HTTP_STATUS_OK, MOCK_ENDPOINT_RESPONSE);
});
it('should use wild card', async () => {
await testAction({ action: actions.fetchSidebarCount, state, expectedMutations: [] });
expect(mock.history.get[0].url).toBe('http://test.host/search/count?scope=projects&search=*');
expect(mock.history.get[3].url).toBe(
'http://test.host/search/count?scope=merge_requests&search=*',
const commit = jest.fn();
await actions.fetchSidebarCount({ commit, state });
expect(urlUtils.setUrlParams).toHaveBeenCalledWith(
expect.objectContaining({ search: '*' }),
expect.anything(),
);
});
});
@ -409,16 +511,16 @@ describe('Global Search Store Actions', () => {
{
payload: {
key: 'label_name',
value: ['Aftersync', 'Brist'],
value: ['Aftersync'],
},
type: 'SET_QUERY',
},
{
payload: true,
payload: false,
type: 'SET_SIDEBAR_DIRTY',
},
];
return testAction(actions.closeLabel, { key: '60' }, state, expectedResult, []);
return testAction(actions.closeLabel, { title: 'Brist' }, state, expectedResult, []);
});
});

View File

@ -1,3 +1,4 @@
// rspec spec/frontend/fixtures/search_navigation.rb to generate these files
import subItemActive from 'test_fixtures/search_navigation/sub_item_active.json';
import noActiveItems from 'test_fixtures/search_navigation/no_active_items.json';
import partialNavigationActive from 'test_fixtures/search_navigation/partial_navigation_active.json';
@ -17,6 +18,7 @@ import {
injectRegexSearch,
scopeCrawler,
skipBlobESCount,
buildDocumentTitle,
} from '~/search/store/utils';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
@ -447,4 +449,46 @@ describe('Global Search Store Utils', () => {
expect(skipBlobESCount(state, SCOPE_BLOB)).toBe(false);
});
});
describe('buildDocumentTitle', () => {
const SEARCH_WINDOW_TITLE = `Search`; // Make sure this matches your actual constant
let originalTitle;
beforeEach(() => {
originalTitle = document.title;
});
afterEach(() => {
document.title = originalTitle;
});
it('returns original title when document title does not include search title', () => {
document.title = 'GitLab';
expect(buildDocumentTitle('test')).toBe('test');
});
it('prepends new title when document title starts with search title', () => {
document.title = `${SEARCH_WINDOW_TITLE} · GitLab`;
const result = buildDocumentTitle('test');
expect(result).toBe(`test · ${SEARCH_WINDOW_TITLE} · GitLab`);
});
it('prepends new title when document title starts with dot and search title', () => {
document.title = ` · ${SEARCH_WINDOW_TITLE} · GitLab`;
const result = buildDocumentTitle('test');
expect(result).toBe(`test · ${SEARCH_WINDOW_TITLE} · GitLab`);
});
it('replaces title before search title with new title', () => {
document.title = `Issues · ${SEARCH_WINDOW_TITLE} · GitLab`;
const result = buildDocumentTitle('test');
expect(result).toBe(`test · ${SEARCH_WINDOW_TITLE} · GitLab`);
});
it('handles complex titles correctly', () => {
document.title = `Something · With · Dots · ${SEARCH_WINDOW_TITLE} · GitLab`;
const result = buildDocumentTitle('test');
expect(result).toBe(`test · ${SEARCH_WINDOW_TITLE} · GitLab`);
});
});
});

View File

@ -229,4 +229,48 @@ describe('GlobalSearchTopbar', () => {
});
});
});
describe('search computed property setter', () => {
describe.each`
FF | scope | searchType | debounced
${{ zoektMultimatchFrontend: true }} | ${'blobs'} | ${'zoekt'} | ${true}
${{ zoektMultimatchFrontend: false }} | ${'blobs'} | ${'zoekt'} | ${false}
${{ zoektMultimatchFrontend: true }} | ${'issues'} | ${'zoekt'} | ${false}
${{ zoektMultimatchFrontend: true }} | ${'blobs'} | ${'advanced'} | ${false}
`(
'when isMultiMatch is $debounced (FF: $FF, scope: $scope, searchType: $searchType)',
({ FF, scope, searchType, debounced }) => {
beforeEach(() => {
getterSpies.currentScope = jest.fn(() => scope);
actionSpies.setQuery.mockClear();
createComponent({
featureFlag: FF,
initialState: { searchType },
});
wrapper.vm.debouncedSetQuery = jest.fn();
});
it(`${debounced ? 'calls debouncedSetQuery' : 'calls setQuery directly'}`, () => {
findGlSearchBox().vm.$emit('input', 'new search value');
if (debounced) {
expect(actionSpies.setQuery).not.toHaveBeenCalled();
} else {
expect(actionSpies.setQuery).toHaveBeenCalled();
const lastCallArgs = actionSpies.setQuery.mock.calls[0];
const payload = lastCallArgs[lastCallArgs.length - 1];
expect(payload).toEqual(
expect.objectContaining({
key: 'search',
value: 'new search value',
}),
);
}
});
},
);
});
});

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
RSpec.describe 'DeclarativePolicy authorization in GraphQL', feature_category: :shared do
include GraphqlHelpers
include Graphql::ResolverFactories

View File

@ -63,8 +63,10 @@ RSpec.describe SearchHelper, feature_category: :global_search do
shared_examples 'for users' do
let_it_be(:another_user) { create(:user, name: 'Jane Doe') }
let(:term) { 'jane' }
let_it_be(:project) { create(:project, developers: user) }
it 'returns users matching the term' do
project.add_developer(another_user)
result = search_autocomplete_opts(term)
expect(result.size).to eq(1)
expect(result.first[:id]).to eq(another_user.id)
@ -97,21 +99,29 @@ RSpec.describe SearchHelper, feature_category: :global_search do
it 'includes users with matching public emails' do
public_email_user
project.add_developer(public_email_user)
expect(ids).to include(public_email_user.id)
end
it 'includes users in forbidden states' do
banned_user
project.add_developer(banned_user)
expect(ids).to include(banned_user.id)
end
it 'includes users without matching public emails but with matching private emails' do
private_email_user
project.add_developer(private_email_user)
expect(ids).to include(private_email_user.id)
end
it 'includes users matching on secondary email' do
secondary_email
project.add_developer(user_with_other_email)
expect(ids).to include(secondary_email.user_id)
end
end
@ -123,21 +133,29 @@ RSpec.describe SearchHelper, feature_category: :global_search do
it 'includes users with matching public emails' do
public_email_user
project.add_developer(public_email_user)
expect(ids).to include(public_email_user.id)
end
it 'does not include users in forbidden states' do
banned_user
project.add_developer(banned_user)
expect(ids).not_to include(banned_user.id)
end
it 'does not include users without matching public emails but with matching private emails' do
private_email_user
project.add_developer(private_email_user)
expect(ids).not_to include(private_email_user.id)
end
it 'does not include users matching on secondary email' do
secondary_email
project.add_developer(secondary_email)
expect(ids).not_to include(secondary_email.user_id)
end
end
@ -146,6 +164,12 @@ RSpec.describe SearchHelper, feature_category: :global_search do
context 'with limiting' do
let_it_be(:users) { create_list(:user, 6, name: 'Jane Doe') }
before do
users.each do |user|
project.add_developer(user)
end
end
it 'only returns the first 5 users' do
result = search_autocomplete_opts(term)
expect(result.size).to eq(5)

View File

@ -93,9 +93,10 @@ RSpec.describe Banzai::Filter::SpacedLinkFilter, feature_category: :markdown do
expect(found_images[0]['alt']).to eq 'example'
end
described_class::IGNORE_PARENTS.each do |elem|
it "ignores valid links contained inside '#{elem}' element" do
exp = act = "<#{elem}>See #{link}</#{elem}>"
described_class::IGNORE_PARENTS.each do |xpath|
it "ignores valid links contained inside '#{xpath}' element" do
match = xpath.match(/(?<element>\w+)(?:\[@(?<attribute>.*)\])?/)
exp = act = "<#{match[:element]}#{" #{match[:attribute]}" if match[:attribute]}>See #{link}</#{match[:element]}>"
expect(filter(act).to_html).to eq exp
end

View File

@ -315,4 +315,17 @@ RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :markdown do
is_expected.to include '<span class="kc">true</span>'
end
end
describe 'math does not get rendered as link' do
[
"$[(a+b)c](d+e)$",
'$$[(a+b)c](d+e)$$',
'$`[(a+b)c](d+e)`$'
].each do |input|
it "when using '#{input}' as input" do
result = described_class.call(input, project: nil)[:output]
expect(result.css('a').first).to be_nil
end
end
end
end

View File

@ -7,7 +7,7 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
include SearchHelpers
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
let_it_be(:user) { create(:user, username: 'foobar') }
let_it_be(:project) { create(:project, name: 'foo') }
let_it_be(:issue) { create(:issue, project: project, title: 'foo') }
let_it_be(:milestone) { create(:milestone, project: project, title: 'foo') }
@ -313,18 +313,166 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
end
describe '#users' do
subject(:user_search_result) { results.objects('users') }
let_it_be(:another_user) { create(:user, username: 'barfoo') }
let_it_be(:group) { create(:group) }
it 'does not call the UsersFinder when the current_user is not allowed to read users list' do
allow(Ability).to receive(:allowed?).and_return(false)
expect(UsersFinder).not_to receive(:new).with(user, { search: 'foo', use_minimum_char_limit: false }).and_call_original
expect(UsersFinder).not_to receive(:new)
results.objects('users')
user_search_result
end
it 'calls the UsersFinder' do
expect(UsersFinder).to receive(:new).with(user, { search: 'foo', use_minimum_char_limit: false }).and_call_original
expected_params = {
search: 'foo',
use_minimum_char_limit: false
}
results.objects('users')
expect(UsersFinder).to receive(:new).with(user, expected_params).and_call_original
user_search_result
end
context 'when the autocomplete filter is added' do
let(:filters) { { autocomplete: true } }
shared_examples 'returns users' do
it 'returns the current_user since they match the query' do
expect(user_search_result).to match_array(user)
end
context 'when another user belongs to a project the current_user belongs to' do
before do
project.add_developer(another_user)
end
it 'includes the other user' do
expect(user_search_result).to match_array([user, another_user])
end
end
context 'when another user belongs to a group' do
before do
group.add_developer(another_user)
end
it 'does not include the other user' do
expect(user_search_result).not_to include(another_user)
end
context 'when the current_user also belongs to that group' do
before do
group.add_developer(user)
end
it 'includes the other user' do
expect(user_search_result).to match_array([user, another_user])
end
end
context 'when the current_user belongs to a parent of the group' do
let_it_be(:parent_group) { create(:group) }
let_it_be(:group) { create(:group, parent: parent_group) }
before do
parent_group.add_developer(user)
end
it 'includes the other user' do
expect(user_search_result).to match_array([user, another_user])
end
end
context 'when the current_user belongs to a group that is shared by the group' do
let_it_be_with_reload(:shared_with_group) { create(:group) }
let_it_be_with_reload(:group_group_link) do
create(
:group_group_link,
group_access: ::Gitlab::Access::GUEST,
shared_group: group,
shared_with_group: shared_with_group
)
end
before do
shared_with_group.add_developer(user)
end
it 'includes the other user' do
expect(user_search_result).to match_array([user, another_user])
end
end
context 'when the current_user belongs to a child of the group' do
let_it_be(:child_group) { create(:group, parent: group) }
before do
child_group.add_developer(user)
end
it 'includes the other user' do
expect(user_search_result).to match_array([user, another_user])
end
end
end
context 'when another user is a guest of a private group' do
let_it_be(:private_group) { create(:group, :private) }
before do
private_group.add_guest(another_user)
end
it 'does not include the other user' do
expect(user_search_result).to match_array(user)
end
context 'when the current_user is a guest of the private group' do
before do
private_group.add_guest(user)
end
it 'includes the other user' do
expect(user_search_result).to match_array([user, another_user])
end
end
context 'when the current_user is a guest of the public parent of the private group' do
let_it_be(:public_parent_group) { create(:group, :public) }
let_it_be(:private_group) { create(:group, :private, parent: public_parent_group) }
before do
public_parent_group.add_guest(user)
end
it 'includes the other user' do
expect(user_search_result).to match_array([user, another_user])
end
end
end
end
context 'when users_search_scoped_to_authorized_namespaces_basic_search is enabled' do
before do
stub_feature_flags(users_search_scoped_to_authorized_namespaces_basic_search: true)
stub_feature_flags(users_search_scoped_to_authorized_namespaces_basic_search_by_ids: false)
end
include_examples 'returns users'
end
context 'when users_search_scoped_to_authorized_namespaces_basic_search_by_ids is enabled' do
before do
stub_feature_flags(users_search_scoped_to_authorized_namespaces_basic_search_by_ids: true)
stub_feature_flags(users_search_scoped_to_authorized_namespaces_basic_search: false)
end
include_examples 'returns users'
end
end
end
end

View File

@ -2,54 +2,100 @@
require 'spec_helper'
RSpec.describe Gitlab::UrlSanitizer do
RSpec.describe Gitlab::UrlSanitizer, feature_category: :shared do
using RSpec::Parameterized::TableSyntax
describe '.sanitize' do
def sanitize_url(url)
# We want to try with multi-line content because is how error messages are formatted
described_class.sanitize(%(
remote: Not Found
fatal: repository `#{url}` not found
))
end
context 'when username and password is not passed' do
where(:url, :sanitized_url) do
# http(s), ssh, git, relative, and schemeless URLs should all be masked correctly
urls = ['http://', 'https://', 'ssh://', 'git://', '//', ''].flat_map do |protocol|
[
["#{protocol}test.com", "#{protocol}test.com"],
["#{protocol}test.com/", "#{protocol}test.com/"],
["#{protocol}test.com/path/to/repo.git", "#{protocol}test.com/path/to/repo.git"],
["#{protocol}user@test.com", "#{protocol}*****@test.com"],
["#{protocol}user:pass@test.com", "#{protocol}*****:*****@test.com"],
["#{protocol}user:@test.com", "#{protocol}*****@test.com"],
["#{protocol}:pass@test.com", "#{protocol}:*****@test.com"]
]
end
where(:input, :output) do
# http(s), ssh, git, relative, and schemeless URLs should all be masked correctly
urls = ['http://', 'https://', 'ssh://', 'git://', '//', ''].flat_map do |protocol|
[
["#{protocol}test.com", "#{protocol}test.com"],
["#{protocol}test.com/", "#{protocol}test.com/"],
["#{protocol}test.com/path/to/repo.git", "#{protocol}test.com/path/to/repo.git"],
["#{protocol}user@test.com", "#{protocol}*****@test.com"],
["#{protocol}user:pass@test.com", "#{protocol}*****:*****@test.com"],
["#{protocol}user:@test.com", "#{protocol}*****@test.com"],
["#{protocol}:pass@test.com", "#{protocol}:*****@test.com"]
]
# SCP-style URLs are left unmodified
urls << ['user@server:project.git', 'user@server:project.git']
urls << ['user:@server:project.git', 'user:@server:project.git']
urls << [':pass@server:project.git', ':pass@server:project.git']
urls << ['user:pass@server:project.git', 'user:pass@server:project.git']
urls << ['user:pass@server:123project.git', 'user:pass@server:123project.git']
urls << ['user:pass@server:1project3.git', 'user:pass@server:1project3.git']
urls << ['user:pass@server:project123.git', 'user:pass@server:project123.git']
urls << ['root@host:/root/ids/rules.tar.gz', 'root@host:/root/ids/rules.tar.gz']
urls << ['user:pass@server.com:/3000/path.git', 'user:pass@server.com:/3000/path.git']
# actual URLs that look like SCP-styled URLS
urls << ['username:password@test.com', '*****:*****@test.com']
urls << ['username:password@test.com:1234', '*****:*****@test.com:1234']
urls << ['username:password@test.com:1234/org/project', '*****:*****@test.com:1234/org/project']
urls << ['username:password@test.com:1234/org/project.git', '*****:*****@test.com:1234/org/project.git']
end
# SCP-style URLs are left unmodified
urls << ['user@server:project.git', 'user@server:project.git']
urls << ['user:@server:project.git', 'user:@server:project.git']
urls << [':pass@server:project.git', ':pass@server:project.git']
urls << ['user:pass@server:project.git', 'user:pass@server:project.git']
urls << ['user:pass@server:123project.git', 'user:pass@server:123project.git']
urls << ['user:pass@server:1project3.git', 'user:pass@server:1project3.git']
urls << ['user:pass@server:project123.git', 'user:pass@server:project123.git']
urls << ['root@host:/root/ids/rules.tar.gz', 'root@host:/root/ids/rules.tar.gz']
with_them do
subject do
# We want to try with multi-line content because is how error messages are formatted
described_class.sanitize(<<~CONTENT)
remote: Not Found
fatal: repository `#{url}` not found
CONTENT
end
# actual URLs that look like SCP-styled URLS
urls << ['username:password@test.com', '*****:*****@test.com']
urls << ['username:password@test.com:1234', '*****:*****@test.com:1234']
urls << ['username:password@test.com:1234/org/project', '*****:*****@test.com:1234/org/project']
urls << ['username:password@test.com:1234/org/project.git', '*****:*****@test.com:1234/org/project.git']
let(:sanitized_output) do
<<~CONTENT
remote: Not Found
fatal: repository `#{sanitized_url}` not found
CONTENT
end
# return an empty string for invalid URLs
urls << ['ssh://', '']
it { is_expected.to eq(sanitized_output) }
end
end
with_them do
it { expect(sanitize_url(input)).to include("repository `#{output}` not found") }
context 'when username and password is passed' do
where(:url, :user, :password, :sanitized_url) do
# http(s), ssh, git, relative, and schemeless URLs should all be masked correctly
urls = ['http://', 'https://', 'ssh://', 'git://', '//', ''].flat_map do |protocol|
[
["#{protocol}user@@test.com", 'user@', nil, "#{protocol}*****@test.com"],
["#{protocol}user@#:pass !@test.com", 'user@#', 'pass !', "#{protocol}*****:*****@test.com"],
["#{protocol};/?:@&=+$,\\[\\]:@test.com", ";/?:@&=+$,\\[\\]", '', "#{protocol}*****@test.com"],
["#{protocol}:;/?:@&=+$,\\[\\]@test.com", '', ';/?:@&=+$,\\[\\]', "#{protocol}:*****@test.com"]
]
end
# actual URLs that look like SCP-styled URLS
urls << ["user@@test.com", 'user@', '', "*****@test.com"]
urls << ["user@#:pass !@test.com", 'user@#', 'pass !', "*****:*****@test.com"]
urls << [";/?:@&=+$,\\[\\]:@test.com", ";/?:@&=+$,\\[\\]", nil, "*****@test.com"]
urls << [":;/?:@&=+$,\\[\\]@test.com", nil, ';/?:@&=+$,\\[\\]', ":*****@test.com"]
end
with_them do
subject do
# We want to try with multi-line content because that is how error messages are formatted
described_class.sanitize(<<~CONTENT, user: user, password: password)
remote: Not Found
fatal: repository `#{url}` not found
CONTENT
end
let(:sanitized_output) do
<<~CONTENT
remote: Not Found
fatal: repository `#{sanitized_url}` not found
CONTENT
end
it { is_expected.to eq(sanitized_output) }
end
end
end

View File

@ -2212,6 +2212,56 @@ RSpec.describe Notify, feature_category: :code_review_workflow do
is_expected.to have_body_text project_merge_request_path(project, merge_request)
end
end
context 'with internal notes' do
let!(:review) { create(:review, project: project, merge_request: merge_request) }
let!(:notes) { nil } # This was done to avoid the creation of notes defined in the parent context
before do
create(:note, :internal, note: 'Internal note 1', review: review, project: project, author: reviewer, noteable: merge_request)
create(:note, :internal, note: 'Internal note 2', review: review, project: project, author: reviewer, noteable: merge_request)
end
subject { described_class.new_review_email(recipient.id, review.id).text_part }
context 'when the review contains both public and internal notes' do
before do
create(:note, note: 'Public note 1', review: review, project: project, author: reviewer, noteable: merge_request)
end
context 'when the recipient can read internal notes' do
before do
project.add_maintainer(recipient)
end
it 'includes all notes', :aggregate_failures do
expect(subject).to have_body_text 'Internal note 1'
expect(subject).to have_body_text 'Internal note 2'
expect(subject).to have_body_text 'Public note 1'
end
end
context 'when the recipient cannot read internal notes' do
let(:recipient) { create(:user, guest_of: project) }
it 'does not include internal notes', :aggregate_failures do
expect(subject).not_to have_body_text 'Internal note 1'
expect(subject).not_to have_body_text 'Internal note 2'
expect(subject).to have_body_text 'Public note 1'
end
end
end
context 'when the review contains internal notes only' do
context 'when the recipient cannot read internal notes' do
let(:recipient) { create(:user, guest_of: project) }
it 'does not send an email' do
expect(subject).to be_nil
end
end
end
end
end
describe 'rate limiting', :freeze_time, :clean_gitlab_redis_rate_limiting do

View File

@ -80,7 +80,8 @@ RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
end
describe '#mark_as_failed' do
let(:error_message) { 'some message' }
let(:error_message) { 'http://user:password@gitlab.com/group/project.git failed to download' }
let(:sanitized_error_message) { 'http://*****:*****@gitlab.com/group/project.git failed to download' }
it 'logs error when update column fails' do
allow(import_state).to receive(:update_column).and_raise(ActiveRecord::ActiveRecordError)
@ -90,7 +91,7 @@ RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
{
error: 'ActiveRecord::ActiveRecordError',
message: 'Error setting import status to failed',
original_error: error_message
original_error: sanitized_error_message
}
)
end
@ -101,7 +102,7 @@ RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
it 'updates last_error with error message' do
import_state.mark_as_failed(error_message)
expect(import_state.last_error).to eq(error_message)
expect(import_state.last_error).to eq(sanitized_error_message)
end
it 'removes project import data' do

View File

@ -188,8 +188,8 @@ RSpec.describe RemoteMirror, :mailer, feature_category: :source_code_management
end
describe '#mark_as_failed!' do
let(:remote_mirror) { create(:remote_mirror) }
let(:error_message) { 'http://user:pass@test.com/root/repoC.git/' }
let(:remote_mirror) { create(:remote_mirror, credentials: { user: 'user @ # !', password: 'password @ # !' }) }
let(:error_message) { "http://#{remote_mirror.user}:#{remote_mirror.password}@test.com/root/repoC.git/" }
let(:sanitized_error_message) { 'http://*****:*****@test.com/root/repoC.git/' }
subject do

View File

@ -65,6 +65,12 @@ RSpec.describe BasePolicy do
end
end
context 'with the admin bot user' do
let(:current_user) { ::Users::Internal.admin_bot }
it { is_expected.to be_allowed(ability) }
end
context 'with anonymous' do
let(:current_user) { nil }

View File

@ -1298,7 +1298,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
end
context 'all other user types' do
User::USER_TYPES.except(:human, :project_bot, :placeholder, :import_user).each_value do |user_type|
User::USER_TYPES.except(:human, :project_bot, :admin_bot, :placeholder, :import_user).each_value do |user_type|
context "with user_type #{user_type}" do
before do
current_user.update!(user_type: user_type)

View File

@ -92,7 +92,9 @@ RSpec.describe Packages::Policies::DependencyProxy::GroupPolicy, feature_categor
end
context 'with all other user types' do
User::USER_TYPES.except(:human, :project_bot, :security_policy_bot, :placeholder).each_value do |user_type|
excluded_types = %i[human project_bot security_policy_bot admin_bot placeholder]
User::USER_TYPES.except(*excluded_types).each_value do |user_type|
context "with user_type #{user_type}" do
let_it_be(:auth_token) { create(:personal_access_token, user: non_group_member) }

View File

@ -106,4 +106,38 @@ RSpec.describe 'Projects blob controller', feature_category: :code_review_workfl
end
end
end
describe 'POST preview' do
let(:content) { 'Some content' }
def do_post(content)
post namespace_project_preview_blob_path(
namespace_id: project.namespace,
project_id: project,
id: 'master/CHANGELOG'
), params: { content: content }
end
context 'when content is within size limit' do
it 'returns success and renders the preview' do
do_post(content)
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['Content-Type']).to include('text/html')
end
end
context 'when content exceeds size limit' do
before do
stub_const('Projects::BlobController::MAX_PREVIEW_CONTENT', 1.byte)
end
it 'returns payload too large error' do
do_post(content)
expect(response).to have_gitlab_http_status(:payload_too_large)
expect(json_response['errors']).to include('Preview content too large')
end
end
end
end

View File

@ -9,15 +9,16 @@ module SearchHelpers
end
def submit_search(query)
# Forms directly on the search page
if page.has_css?('.search-page-form')
search_form = '.search-page-form'
# Open search modal from super sidebar
else
find_by_testid('super-sidebar-search-button').click
search_form = '#super-sidebar-search-modal'
end
wait_for_all_requests
page.within(search_form) do
field = find_field('search')
field.click

View File

@ -108,6 +108,7 @@ RSpec.shared_context 'GroupPolicy context' do
admin_integrations
set_issue_updated_at
set_issue_created_at
activate_group_member
]
end

24
vendor/gems/graphql/.gitlab-ci.yml vendored Normal file
View File

@ -0,0 +1,24 @@
include:
- local: gems/gem.gitlab-ci.yml
inputs:
gem_name: "graphql"
gem_path_prefix: "vendor/gems/"
rspec:
extends: .default
before_script:
- apt-get update -qq
- apt-get install -qq -y cmake
- cmake --version
- cd vendor/gems/graphql
- ruby -v # Print out ruby version for debugging
- gem update --system
- bundle_version=$(grep -A 1 "BUNDLED WITH" Gemfile.lock | tail -n 1 | sed -e 's/[[:space:]]//')
- gem install bundler --version "$bundle_version" --no-document # Bundler is not installed with the image
- bundle config # Show bundler configuration
- bundle install --jobs=$(nproc) --retry=3
script:
- bundle exec rake test
parallel:
matrix:
- RUBY_VERSION: ["${RUBY_VERSION_DEFAULT}", "${RUBY_VERSION_NEXT}"]

5
vendor/gems/graphql/.yardopts vendored Normal file
View File

@ -0,0 +1,5 @@
--no-private
--markup=markdown
--readme=readme.md
--title='GraphQL Ruby API Documentation'
'lib/**/*.rb' - '*.md'

View File

@ -0,0 +1,198 @@
# graphql-enterprise
### Breaking Changes
### Deprecations
### New Features
### Bug Fix
# 1.5.6 (13 Dec 2024)
- ObjectCache: Add `CacheableRelation` helper for top-level ActiveRecord relations
# 1.5.5 (10 Dec 2024)
- Changesets: Add missing `ensure_loaded` call for class-based changesets
# 1.5.4 (31 Oct 2024)
- ObjectCache: Add `reauthorize_cached_objects: false`
# 1.5.3 (1 Oct 2024)
- Limiters: Add expiration to rate limit data (to reduce Redis footprint)
# 1.5.2 (6 Sept 2024)
- Limiters: Add `connection_pool:` support
# 1.5.1 (30 Aug 2024)
- ObjectCache: Add `connection_pool:` support
# 1.5.0 (26 Jul 2024)
- ObjectCache: Add Dalli backend for Memcached
# 1.4.2 (11 Jun 2024)
- ObjectCache: Add `Schema.fingerprint` hook and `context[:refresh_object_cache]`
# 1.4.1 (30 May 2024)
- ObjectCache: properly handle when object fingerprints are evicted but the cached result wasn't
# 1.4.0 (11 Apr 2024)
- ObjectCache: add support for `redis_cluster: ...` backend
# 1.3.4 (18 Mar 2024)
- ObjectCache: use new `trace_with` API for instrumentation
# 1.3.3 (30 Jan 2024)
- ObjectCache: fix compatibility with `run_graphql_field` test helper #4816
# 1.3.2 (15 Jan 2024)
### Bug Fix
- Limiters: Migrate to new `trace_with` instrumentation API, requires GraphQL-Ruby 2.0.18+
# 1.3.1 (12 June 2023)
### Bug Fix
- Add missing `require "graphql"` #4511
# 1.3.0 (29 May 2023)
### New Features
- Changesets: Add `added_in: ...` and `removed_in: ...` for inline definition changes
# 1.2.0 (10 February 2023)
### New Features
- Support the `redis-client` gem as `redis:` (requires graphql-pro 1.24.0+)
# 1.1.14 (3 November 2022)
### New Features
- Limiters: Support `dashboard_charts: false` to disable built-in instrumentation
- Limiters: Support `assign_as:` to use a different accessor method for storing limiter instances on schema classes (add a corresponding `class << self; attr_accessor ...; end` to the schema class to use it)
- Limiters: Support `context_key:` to put runtime info in a different key in query context
- Runtime Limiter: Add `window_ms:` to runtime info
# 1.1.13 (21 October 2022)
### Bug Fix
- Limiter: handle missing fields in MutationLimiter
# 1.1.12 (18 October 2022)
### New Features
- Limiters: add MutationLimiter
### Bug Fix
- ObjectCache: Update Redis calls to support redis-rb 5.0
# 1.1.11 (25 August 2022)
### Bug Fix
- ObjectCache: also update `delete` to handle more than 1000 objects in Lua
# 1.1.10 (19 August 2022)
### Bug Fix
- ObjectCache: read and write objects 1000-at-a-time to avoid overloading Lua scripts in Redis
# 1.1.9 (3 August 2022)
### New Features
- ObjectCache: Add a message to context when a type or field causes a query to be treated as "private"
### Bug Fix
- ObjectCache: skip the query analyzer when `context[:skip_object_cache]` is present
# 1.1.8 (1 August 2022)
### New Features
- ObjectCache: Add `ObjectType.cache_dependencies_for(object, context)` to customize dependencies for an object
### Bug Fix
- ObjectCache: Fix to make `context[:object_cache][:objects]` a Set
# 1.1.7 (28 July 2022)
### Bug Fix
- ObjectCache: remove needless `resolve_type` calls
# 1.1.6 (28 July 2022)
### Bug Fix
- ObjectCache: persist the type names of cached objects, pass them to `Schema.resolve_type` when validating cached responses.
# 1.1.5 (22 July 2022)
### New Features
- ObjectCache: add `cache_introspection: { ttl: ... }` for setting an expiration (in seconds) on introspection fields.
# 1.1.4 (19 March 2022)
### Bug Fix
- ObjectCache: don't create a cache fingerprint if the query is found to be uncacheable during analysis.
# 1.1.3 (3 March 2022)
### Bug Fix
- Changesets: Return an empty set when a schema doesn't use changesets #3972
# 1.1.2 (1 March 2022)
### New Features
- Changesets: Add introspection methods `Schema.changesets` and `Changeset.changes`
# 1.1.1 (14 February 2021)
### Bug Fix
- Changesets: don't require `context.schema` for plain-Ruby calls to introspection methods #3929
# 1.1.0 (24 November 2021)
### New Features
- Changesets: Add `GraphQL::Enterprise::Changeset`
# 1.0.1 (9 November 2021)
### Bug Fix
- Object Cache: properly handle invalid queries #3703
# 1.0.0 (13 October 2021)
### New Features
- Rate limiters: first release
- Object cache: first release

1331
vendor/gems/graphql/CHANGELOG-pro.md vendored Normal file

File diff suppressed because it is too large Load Diff

176
vendor/gems/graphql/CHANGELOG-relay.md vendored Normal file
View File

@ -0,0 +1,176 @@
# graphql-relay
### Breaking Changes
### Deprecations
### New Features
### Bug Fix
## 0.12.0 (21 Jul 2016)
### Breaking Changes
- Don't cache a global node identification config #51
To migrate, assign your node identification helper to the schema:
```ruby
NodeIdentification = GraphQL::Relay::GlobalNodeIdentification.define { ... }
MySchema.node_identification = NodeIdentification
```
### New Features
- Support lazy definition blocks from graphql-ruby 0.17
- Add `startCursor` and `endCursor` to `PageInfo` #60
### Bug Fix
- Support `field:` keyword for connection helper #58
## 0.11.2 (6 Jul 2016)
### New Features
- Include description for built-in objects #55
## 0.11.1 (24 Jun 2016)
### Bug Fix
- Correctly pass parent object to Connections #53
## 0.11.0 (19 Jun 2016)
### Breaking Changes
- `BaseType.define_connection` no longer caches the result to use as the default `BaseType.connection_type`. Now, store the result of `.define_connection` in a variable and pass that variable into the schema:
```ruby
# Capture the returned type:
SomethingCustomConnectionType = SomethingType.define_connection { ... }
DifferentThingType = GraphQL::ObjectType.define do
# And pass it to the connection helper:
connection :somethings, SomethingCustomConnectionType
end
```
### New Features
- Support for custom edge types / classes #50
- Support for multiple connection classes #50
## 0.10.0 (31 May 2016)
### New Feature
- Support `graphql` 0.14.0 #47
### Bug Fix
- Use strings as argument names, not symbols #47
## 0.9.5
### New Feature
- Root `id` field may have a description #43
## 0.9.4 (29 Apr 2016)
### Bug Fix
- Fix Node interface to support GraphQL 0.13.0+
## 0.9.2 (29 Apr 2016)
### Bug Fix
- Fix Node interface when type_from_object returns nil
## 0.9.1 (6 Apr 2016)
### Bug Fix
- Respond to connection fields without any pagination arguments
- Limit by `max_page_size` even when no arguments are present
## 0.9.0 (30 Mar 2016)
### Breaking change
- Remove the `order` argument from connection fields. This isn't part of the spec and shouldn't have been there in the first place!
You can implement this behavior with a custom argument, for example:
```ruby
field :cities, CityType.connection_type do
argument :order, types.String, default_value: "name"
resolve ->(obj, args, ctx) {
obj.order(args[:order])
}
end
```
### Bug Fix
- Include the MIT license in the project's source
## 0.8.1 (22 Mar 2016)
### Bug Fix
- Accept description for Mutations
## 0.8.0 (20 Mar 2016)
### New Feature
- Accept configs for `to_global_id` and `from_global_id`
- Support `graphql` 0.12+
## 0.7.1 (29 Feb 2016)
### Bug Fix
- Limit the `count(*)` when testing next page with ActiveRecord #28
## 0.7.0 (20 Feb 2016)
### New Feature
- `max_page_size` option for connections
- Support ActiveSupport 5.0.0.beta2
## 0.6.2 (11 Feb 2016)
### Bug Fix
- Correctly cast values from connection cursors #21
- Use class _name_ instead of class _object_ when finding a connection implementation (to support Rails autoloading) #16
## 0.6.1 (14 Dec 2015)
### Bug Fix
- Stringify `id` when passed into `to_global_id`
## 0.6.0 (11 Dec 2015)
### Breaking Change
- `GlobalNodeIdentification#object_from_id(id, ctx)` now accepts context as the second argument #9
## 0.5.1 (11 Dec 2015)
### Feature
- Allow custom UUID join string #15
### Bug Fix
- Remove implicit ActiveSupport dependency #14

4646
vendor/gems/graphql/CHANGELOG.md vendored Normal file

File diff suppressed because it is too large Load Diff

1
vendor/gems/graphql/CNAME vendored Normal file
View File

@ -0,0 +1 @@
graphql-ruby.org

25
vendor/gems/graphql/Gemfile vendored Normal file
View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
source "https://rubygems.org"
gemspec
gem 'bootsnap' # required by the Rails apps generated in tests
gem 'stackprof', platform: :ruby
gem 'pry'
gem 'pry-stack_explorer', platform: :ruby
gem 'pry-byebug'
if RUBY_VERSION >= "3.0"
gem "libev_scheduler"
gem "evt"
end
if RUBY_VERSION >= "3.1.1"
gem "async", "~>2.0"
end
# Required for running `jekyll algolia ...` (via `rake site:update_search_index`)
group :jekyll_plugins do
gem 'jekyll-algolia', '~> 1.0'
gem 'jekyll-redirect-from'
end

324
vendor/gems/graphql/Gemfile.lock vendored Normal file
View File

@ -0,0 +1,324 @@
PATH
remote: .
specs:
graphql (2.4.11)
base64
fiber-storage
logger
GEM
remote: https://rubygems.org/
specs:
addressable (2.8.7)
public_suffix (>= 2.0.2, < 7.0)
algolia_html_extractor (2.6.4)
json (~> 2.0)
nokogiri (~> 1.10)
algoliasearch (1.27.5)
httpclient (~> 2.8, >= 2.8.3)
json (>= 1.5.1)
ansi (1.5.0)
ast (2.4.2)
async (2.23.0)
console (~> 1.29)
fiber-annotation
io-event (~> 1.9)
metrics (~> 0.12)
traces (~> 0.15)
base64 (0.2.0)
benchmark-ips (2.14.0)
bigdecimal (3.1.9)
binding_of_caller (1.0.1)
debug_inspector (>= 1.2.0)
bootsnap (1.18.4)
msgpack (~> 1.2)
builder (3.3.0)
byebug (11.1.3)
coderay (1.1.3)
colorator (1.1.0)
concurrent-ruby (1.3.5)
console (1.30.0)
fiber-annotation
fiber-local (~> 1.1)
json
csv (3.3.2)
debug_inspector (1.2.0)
docile (1.4.1)
em-websocket (0.5.3)
eventmachine (>= 0.12.9)
http_parser.rb (~> 0)
eventmachine (1.2.7)
evt (0.4.0)
faraday (2.12.2)
faraday-net_http (>= 2.0, < 3.5)
json
logger
faraday-net_http (3.4.0)
net-http (>= 0.5.0)
ffi (1.17.1-aarch64-linux-gnu)
ffi (1.17.1-aarch64-linux-musl)
ffi (1.17.1-arm-linux-gnu)
ffi (1.17.1-arm-linux-musl)
ffi (1.17.1-arm64-darwin)
ffi (1.17.1-x86_64-darwin)
ffi (1.17.1-x86_64-linux-gnu)
ffi (1.17.1-x86_64-linux-musl)
fiber-annotation (0.2.0)
fiber-local (1.1.0)
fiber-storage
fiber-storage (1.0.0)
filesize (0.2.0)
forwardable-extended (2.6.0)
gitlab (4.20.1)
httparty (~> 0.20)
terminal-table (>= 1.5.1)
google-protobuf (4.30.0)
bigdecimal
rake (>= 13)
google-protobuf (4.30.0-aarch64-linux)
bigdecimal
rake (>= 13)
google-protobuf (4.30.0-arm64-darwin)
bigdecimal
rake (>= 13)
google-protobuf (4.30.0-x86_64-darwin)
bigdecimal
rake (>= 13)
google-protobuf (4.30.0-x86_64-linux)
bigdecimal
rake (>= 13)
graphql-batch (0.6.0)
graphql (>= 1.12.18, < 3)
promise.rb (~> 0.7.2)
http_parser.rb (0.8.0)
httparty (0.22.0)
csv
mini_mime (>= 1.0.0)
multi_xml (>= 0.5.2)
httpclient (2.9.0)
mutex_m
i18n (1.14.7)
concurrent-ruby (~> 1.0)
imagen (0.2.0)
parser (>= 2.5, != 2.5.1.1)
io-event (1.9.0)
jekyll (4.4.1)
addressable (~> 2.4)
base64 (~> 0.2)
colorator (~> 1.0)
csv (~> 3.0)
em-websocket (~> 0.5)
i18n (~> 1.0)
jekyll-sass-converter (>= 2.0, < 4.0)
jekyll-watch (~> 2.0)
json (~> 2.6)
kramdown (~> 2.3, >= 2.3.1)
kramdown-parser-gfm (~> 1.0)
liquid (~> 4.0)
mercenary (~> 0.3, >= 0.3.6)
pathutil (~> 0.9)
rouge (>= 3.0, < 5.0)
safe_yaml (~> 1.0)
terminal-table (>= 1.8, < 4.0)
webrick (~> 1.7)
jekyll-algolia (1.7.1)
algolia_html_extractor (~> 2.6)
algoliasearch (~> 1.26)
filesize (~> 0.1)
jekyll (>= 3.6, < 5.0)
json (~> 2.0)
nokogiri (~> 1.6)
progressbar (~> 1.9)
verbal_expressions (~> 0.1.5)
jekyll-redirect-from (0.16.0)
jekyll (>= 3.3, < 5.0)
jekyll-sass-converter (2.2.0)
sassc (> 2.0.1, < 3.0)
jekyll-watch (2.2.1)
listen (~> 3.0)
json (2.10.1)
kramdown (2.5.1)
rexml (>= 3.3.9)
kramdown-parser-gfm (1.1.0)
kramdown (~> 2.0)
language_server-protocol (3.17.0.4)
libev_scheduler (0.2)
lint_roller (1.1.0)
liquid (4.0.4)
listen (3.9.0)
rb-fsevent (~> 0.10, >= 0.10.3)
rb-inotify (~> 0.9, >= 0.9.10)
logger (1.6.6)
m (1.5.1)
method_source (>= 0.6.7)
rake (>= 0.9.2.2)
memory_profiler (1.1.0)
mercenary (0.4.0)
method_source (1.1.0)
metrics (0.12.1)
mini_mime (1.1.5)
minitest (5.25.4)
minitest-focus (1.4.0)
minitest (>= 4, < 6)
minitest-reporters (1.7.1)
ansi
builder
minitest (>= 5.0)
ruby-progressbar
msgpack (1.8.0)
multi_xml (0.7.1)
bigdecimal (~> 3.1)
mutex_m (0.3.0)
net-http (0.6.0)
uri
nokogiri (1.18.3-aarch64-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.3-aarch64-linux-musl)
racc (~> 1.4)
nokogiri (1.18.3-arm-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.3-arm-linux-musl)
racc (~> 1.4)
nokogiri (1.18.3-arm64-darwin)
racc (~> 1.4)
nokogiri (1.18.3-x86_64-darwin)
racc (~> 1.4)
nokogiri (1.18.3-x86_64-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.3-x86_64-linux-musl)
racc (~> 1.4)
octokit (9.2.0)
faraday (>= 1, < 3)
sawyer (~> 0.9)
parallel (1.26.3)
parser (3.3.7.1)
ast (~> 2.4.1)
racc
pathutil (0.16.2)
forwardable-extended (~> 2.6)
progressbar (1.13.0)
promise.rb (0.7.4)
pronto (0.11.3)
gitlab (>= 4.4.0, < 5.0)
httparty (>= 0.13.7, < 1.0)
octokit (>= 4.7.0, < 10.0)
rainbow (>= 2.2, < 4.0)
rexml (>= 3.2.5, < 4.0)
rugged (>= 0.23.0, < 2.0)
thor (>= 0.20.3, < 2.0)
pronto-undercover (0.2.0)
pronto (>= 0.9, < 0.12)
undercover (~> 0.4.3)
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
pry-byebug (3.10.1)
byebug (~> 11.0)
pry (>= 0.13, < 0.15)
pry-stack_explorer (0.6.1)
binding_of_caller (~> 1.0)
pry (~> 0.13)
public_suffix (6.0.1)
racc (1.8.1)
rainbow (3.1.1)
rake (13.2.1)
rake-compiler (1.2.9)
rake
rb-fsevent (0.11.2)
rb-inotify (0.11.1)
ffi (~> 1.0)
regexp_parser (2.10.0)
rexml (3.4.1)
rouge (4.5.1)
rubocop (1.73.2)
json (~> 2.3)
language_server-protocol (~> 3.17.0.2)
lint_roller (~> 1.1.0)
parallel (~> 1.10)
parser (>= 3.3.0.2)
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 2.9.3, < 3.0)
rubocop-ast (>= 1.38.0, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 4.0)
rubocop-ast (1.38.1)
parser (>= 3.3.1.0)
ruby-progressbar (1.13.0)
rugged (1.6.5)
safe_yaml (1.0.5)
sassc (2.4.0)
ffi (~> 1.9)
sawyer (0.9.2)
addressable (>= 2.3.5)
faraday (>= 0.17.3, < 3)
simplecov (0.22.0)
docile (~> 1.1)
simplecov-html (~> 0.11)
simplecov_json_formatter (~> 0.1)
simplecov-html (0.13.1)
simplecov-lcov (0.8.0)
simplecov_json_formatter (0.1.4)
stackprof (0.2.27)
terminal-table (3.0.2)
unicode-display_width (>= 1.1.1, < 3)
thor (1.3.2)
traces (0.15.2)
undercover (0.4.7)
imagen (>= 0.1.8)
rainbow (>= 2.1, < 4.0)
rugged (>= 0.27, < 1.7)
unicode-display_width (2.6.0)
uri (1.0.3)
verbal_expressions (0.1.5)
webrick (1.9.1)
yard (0.9.37)
PLATFORMS
aarch64-linux
aarch64-linux-gnu
aarch64-linux-musl
arm-linux-gnu
arm-linux-musl
arm64-darwin
x86_64-darwin
x86_64-linux
x86_64-linux-gnu
x86_64-linux-musl
DEPENDENCIES
async (~> 2.0)
benchmark-ips
bootsnap
concurrent-ruby (~> 1.0)
evt
google-protobuf
graphql!
graphql-batch
jekyll
jekyll-algolia (~> 1.0)
jekyll-redirect-from
jekyll-sass-converter (~> 2.2)
libev_scheduler
m (~> 1.5.0)
memory_profiler
minitest
minitest-focus
minitest-reporters
mutex_m
pronto
pronto-undercover
pry
pry-byebug
pry-stack_explorer
rake
rake-compiler
rubocop
simplecov
simplecov-lcov
stackprof
undercover
webrick
yard
BUNDLED WITH
2.6.5

20
vendor/gems/graphql/MIT-LICENSE vendored Normal file
View File

@ -0,0 +1,20 @@
Copyright 2015 Robert Mosolgo
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

240
vendor/gems/graphql/Rakefile vendored Normal file
View File

@ -0,0 +1,240 @@
# frozen_string_literal: true
require "bundler/gem_helper"
Bundler::GemHelper.install_tasks

require "rake/testtask"
require_relative "guides/_tasks/site"
require_relative "lib/graphql/rake_task/validate"
require 'rake/extensiontask'

# Main test task. Integration specs for mongoid / rails are excluded
# when the corresponding library isn't installed in this environment.
Rake::TestTask.new do |t|
  t.libs << "spec" << "lib" << "graphql-c_parser/lib"

  exclude_integrations = []
  ['mongoid', 'rails'].each do |integration|
    begin
      require integration
    rescue LoadError
      exclude_integrations << integration
    end
  end

  t.test_files = FileList.new("spec/**/*_spec.rb") do |fl|
    fl.exclude(*exclude_integrations.map { |int| "spec/integration/#{int}/**/*" })
  end

  # After 2.7, there were not warnings for uninitialized ivars anymore
  if RUBY_VERSION < "3"
    t.warning = false
  end
end

require 'rubocop/rake_task'
RuboCop::RakeTask.new

# Bare `rake` runs tests + rubocop; set SYSTEM_TESTS to also run the
# system test suite first.
default_tasks = [:test, :rubocop]
if ENV["SYSTEM_TESTS"]
  task(default: ["test:system"] + default_tasks)
else
  task(default: default_tasks)
end
# Run `check_script` in a subshell and require that its output contains
# `required_version`; otherwise raise with a remediation message.
#
# @param dep_name [String] human-readable tool name (e.g. "Ragel")
# @param required_version [String] version substring expected in the output
# @param check_script [String] shell command that prints the tool's version
# @return [void]
# @raise [RuntimeError] when the version output doesn't match
def assert_dependency_version(dep_name, required_version, check_script)
  version = `#{check_script}`
  # `unless` reads better than `if !...` here (idiomatic Ruby).
  unless version.include?(required_version)
    raise <<-ERR
build_parser requires #{dep_name} version "#{required_version}", but found:

    $ #{check_script}
    > #{version}

To fix this issue:

- Update #{dep_name} to the required version
- Update the assertion in `Rakefile` to match the current version
ERR
  end
end
# Benchmark and profiling tasks. Each task loads ./benchmark/run.rb and
# dispatches to a GraphQLBenchmark class method of the same flavor.
namespace :bench do
  # Put lib/ and spec/support on the load path, then load the harness.
  def prepare_benchmark
    $LOAD_PATH << "./lib" << "./spec/support"
    require_relative("./benchmark/run.rb")
  end

  desc "Benchmark parsing"
  task :parse do
    prepare_benchmark
    GraphQLBenchmark.run("parse")
  end

  desc "Benchmark lexical analysis"
  task :scan do
    prepare_benchmark
    GraphQLBenchmark.run("scan")
  end

  desc "Benchmark the introspection query"
  task :query do
    prepare_benchmark
    GraphQLBenchmark.run("query")
  end

  desc "Benchmark validation of several queries"
  task :validate do
    prepare_benchmark
    GraphQLBenchmark.run("validate")
  end

  desc "Profile a validation"
  task :validate_memory do
    prepare_benchmark
    GraphQLBenchmark.validate_memory
  end

  desc "Generate a profile of the introspection query"
  task :profile do
    prepare_benchmark
    GraphQLBenchmark.profile
  end

  desc "Run benchmarks on a very large result"
  task :profile_large_result do
    prepare_benchmark
    GraphQLBenchmark.profile_large_result
  end

  desc "Run benchmarks on a small result"
  task :profile_small_result do
    prepare_benchmark
    GraphQLBenchmark.profile_small_result
  end

  desc "Run introspection on a small schema"
  task :profile_small_introspection do
    prepare_benchmark
    GraphQLBenchmark.profile_small_introspection
  end

  desc "Dump schema to SDL"
  task :profile_to_definition do
    prepare_benchmark
    GraphQLBenchmark.profile_to_definition
  end

  desc "Load schema from SDL"
  task :profile_from_definition do
    prepare_benchmark
    GraphQLBenchmark.profile_from_definition
  end

  desc "Compare GraphQL-Batch and GraphQL-Dataloader"
  task :profile_batch_loaders do
    prepare_benchmark
    GraphQLBenchmark.profile_batch_loaders
  end

  desc "Run benchmarks on schema creation"
  task :profile_boot do
    prepare_benchmark
    GraphQLBenchmark.profile_boot
  end

  desc "Check the memory footprint of a large schema"
  task :profile_schema_memory_footprint do
    prepare_benchmark
    GraphQLBenchmark.profile_schema_memory_footprint
  end

  desc "Check the depth of the stacktrace during execution"
  task :profile_stack_depth do
    prepare_benchmark
    GraphQLBenchmark.profile_stack_depth
  end

  desc "Run a very big introspection query"
  task :profile_large_introspection do
    prepare_benchmark
    GraphQLBenchmark.profile_large_introspection
  end

  # (No desc: not listed by `rake -T`.)
  task :profile_small_query_on_large_schema do
    prepare_benchmark
    GraphQLBenchmark.profile_small_query_on_large_schema
  end

  desc "Run analysis on a big query"
  task :profile_large_analysis do
    prepare_benchmark
    GraphQLBenchmark.profile_large_analysis
  end

  desc "Run analysis on parsing"
  task :profile_parse do
    prepare_benchmark
    GraphQLBenchmark.profile_parse
  end
end
namespace :test do
  desc "Run system tests for ActionCable subscriptions"
  task :system do
    # `success` is the result of the last `system` call; abort so rake
    # exits non-zero when the suite fails.
    success = Dir.chdir("spec/dummy") do
      system("bundle install")
      system("bundle exec bin/rails test:system")
    end
    success || abort
  end

  task js: "js:test"
end

# Tasks for the yarn-based javascript_client package.
namespace :js do
  client_dir = "./javascript_client"

  desc "Run the tests for javascript_client"
  task :test do
    success = Dir.chdir(client_dir) do
      system("yarn run test")
    end
    success || abort
  end

  desc "Install JS dependencies"
  task :install do
    Dir.chdir(client_dir) do
      system("yarn install")
    end
  end

  desc "Compile TypeScript to JavaScript"
  task :build do
    Dir.chdir(client_dir) do
      system("yarn tsc")
    end
  end

  task all: [:install, :build, :test]
end
# Regenerate the C lexer from its Ragel grammar (requires Ragel 7.0.4).
task :build_c_lexer do
  assert_dependency_version("Ragel", "7.0.4", "ragel -v")
  `ragel -F1 graphql-c_parser/ext/graphql_c_parser_ext/lexer.rl`
end

Rake::ExtensionTask.new("graphql_c_parser_ext") do |t|
  t.ext_dir = 'graphql-c_parser/ext/graphql_c_parser_ext'
  t.lib_dir = "graphql-c_parser/lib/graphql"
end

# Regenerate the C parser from its Yacc grammar (requires Bison 3.8).
task :build_yacc_parser do
  assert_dependency_version("Bison", "3.8", "yacc --version")
  `yacc graphql-c_parser/ext/graphql_c_parser_ext/parser.y -o graphql-c_parser/ext/graphql_c_parser_ext/parser.c -Wyacc`
end

task :move_binary do
  # For some reason my local env doesn't respect the `lib_dir` configured above
  `mv graphql-c_parser/lib/*.bundle graphql-c_parser/lib/graphql`
end

desc "Build the C Extension"
task build_ext: [:build_c_lexer, :build_yacc_parser, "compile:graphql_c_parser_ext", :move_binary]

View File

@ -0,0 +1,41 @@
query AbstractFragments {
node(id: "1") {
...Frag1
}
}
fragment Frag1 on Commentable {
id
__typename
...Frag2
}
fragment Frag2 on Commentable {
id
__typename
...Frag3
}
fragment Frag3 on Commentable {
id
__typename
...Frag4
}
fragment Frag4 on Commentable {
id
__typename
...Frag5
}
fragment Frag5 on Commentable {
id
__typename
...Frag6
}
fragment Frag6 on Commentable {
comments {
body
}
}

View File

@ -0,0 +1,64 @@
query AbstractFragments {
node(id: "1") {
...Frag1
}
}
fragment Frag1 on Commentable {
id
__typename
...Frag9
...Frag2
}
fragment Frag2 on Commentable {
id
__typename
...Frag9
...Frag3
}
fragment Frag3 on Commentable {
id
__typename
...Frag9
...Frag4
}
fragment Frag4 on Commentable {
id
__typename
...Frag9
...Frag5
}
fragment Frag5 on Commentable {
id
__typename
...Frag9
...Frag6
}
fragment Frag6 on Commentable {
...Frag7
...Frag9
name
id
comments {
...Frag8
...Frag7
id
}
}
fragment Frag7 on Node {
id
}
fragment Frag8 on Comment {
body
}
fragment Frag9 on Named {
name
}

View File

@ -0,0 +1,138 @@
# frozen_string_literal: true

# Three functionally equivalent schemas over the same in-memory data,
# used to compare batching strategies: graphql-batch, GraphQL::Dataloader,
# and no batching at all.
module BatchLoading
  class GraphQLBatchSchema < GraphQL::Schema
    # Fixture records; teams reference players by id and vice versa.
    DATA = [
      { id: "1", name: "Bulls", player_ids: ["2", "3"] },
      { id: "2", name: "Michael Jordan", team_id: "1" },
      { id: "3", name: "Scottie Pippin", team_id: "1" },
      { id: "4", name: "Braves", player_ids: ["5", "6"] },
      { id: "5", name: "Chipper Jones", team_id: "4" },
      { id: "6", name: "Tom Glavine", team_id: "4" },
    ]

    # GraphQL::Batch loader that resolves DATA records by an arbitrary
    # column (defaults to :id).
    class DataLoader < GraphQL::Batch::Loader
      def initialize(column: :id)
        @column = column
      end

      def perform(keys)
        keys.each do |key|
          record = DATA.find { |d| d[@column] == key }
          fulfill(key, record)
        end
      end
    end

    class Team < GraphQL::Schema::Object
      field :name, String, null: false
      field :players, "[BatchLoading::GraphQLBatchSchema::Player]", null: false

      def players
        DataLoader.load_many(object[:player_ids])
      end
    end

    class Player < GraphQL::Schema::Object
      field :name, String, null: false
      field :team, Team, null: false

      def team
        DataLoader.load(object[:team_id])
      end
    end

    class Query < GraphQL::Schema::Object
      field :team, Team do
        argument :name, String
      end

      def team(name:)
        DataLoader.for(column: :name).load(name)
      end
    end

    query(Query)
    use GraphQL::Batch
  end

  # Same shape as above, but batching via GraphQL::Dataloader sources.
  class GraphQLDataloaderSchema < GraphQL::Schema
    class DataSource < GraphQL::Dataloader::Source
      def initialize(options = {column: :id})
        @column = options[:column]
      end

      def fetch(keys)
        keys.map { |key|
          d = GraphQLBatchSchema::DATA.find { |d| d[@column] == key }
          # p [key, @column, d]
          d
        }
      end
    end

    class Team < GraphQL::Schema::Object
      field :name, String, null: false
      field :players, "[BatchLoading::GraphQLDataloaderSchema::Player]", null: false

      def players
        dataloader.with(DataSource).load_all(object[:player_ids])
      end
    end

    class Player < GraphQL::Schema::Object
      field :name, String, null: false
      field :team, Team, null: false

      def team
        dataloader.with(DataSource).load(object[:team_id])
      end
    end

    class Query < GraphQL::Schema::Object
      field :team, Team do
        argument :name, String
      end

      def team(name:)
        dataloader.with(DataSource, column: :name).load(name)
      end
    end

    query(Query)
    use GraphQL::Dataloader
  end

  # Baseline: each resolver scans DATA directly, no batching layer.
  class GraphQLNoBatchingSchema < GraphQL::Schema
    DATA = GraphQLBatchSchema::DATA

    class Team < GraphQL::Schema::Object
      field :name, String, null: false
      field :players, "[BatchLoading::GraphQLNoBatchingSchema::Player]", null: false

      def players
        object[:player_ids].map { |id| DATA.find { |d| d[:id] == id } }
      end
    end

    class Player < GraphQL::Schema::Object
      field :name, String, null: false
      field :team, Team, null: false

      def team
        DATA.find { |d| d[:id] == object[:team_id] }
      end
    end

    class Query < GraphQL::Schema::Object
      field :team, Team do
        argument :name, String
      end

      def team(name:)
        DATA.find { |d| d[:name] == name }
      end
    end

    query(Query)
  end
end

View File

@ -0,0 +1,476 @@
query Anc_inbox_layout($first_0:Int!,$first_5:Int!,$first_6:Int!,$first_a:Int!,$first_c:Int!,$order_by_1:ReportOrderInput!,$substate_2:ReportStateEnum!,$pre_submission_review_states_3:[ReportPreSubmissionReviewStateEnum]!,$size_4:ProfilePictureSizes!,$size_9:ProfilePictureSizes!,$not_types_7:[ActivityTypes]!,$order_by_8:ActivityOrderInput!,$order_by_b:TeamOrderInput!) {
query {
id,
...FE
}
}
fragment F0 on User {
username,
_profile_picturePkPpF:profile_picture(size:$size_4),
impact,
reputation,
signal,
id
}
fragment F1 on Report {
reporter {
username,
_profile_picturePkPpF:profile_picture(size:$size_4),
impact,
reputation,
signal,
_duplicate_users2Nhzxe:duplicate_users(first:$first_5) {
pageInfo {
hasNextPage,
hasPreviousPage
},
total_count,
edges {
node {
id,
...F0
},
cursor
}
},
id
},
id
}
fragment F2 on Report {
id
}
fragment F3 on Node {
id,
__typename
}
fragment F4 on ReportActivityInterface {
automated_response,
genius_execution_id,
report {
team {
handle,
id
},
id
},
__typename,
...F3
}
fragment F5 on Team {
url,
internet_bug_bounty,
_profile_pictureihzmG:profile_picture(size:$size_9),
name,
id
}
fragment F6 on User {
username,
url,
_profile_pictureihzmG:profile_picture(size:$size_9),
id
}
fragment F7 on ActivitiesBugDuplicate {
original_report_id,
id
}
fragment F8 on ActivitiesReferenceIdAdded {
reference,
reference_url,
id
}
fragment F9 on ActivitiesCveIdAdded {
cve_ids,
id
}
fragment Fa on ActivitiesAgreedOnGoingPublic {
first_to_agree,
id
}
fragment Fb on ActivitiesBugCloned {
original_report_id,
id
}
fragment Fc on ActivitiesUserAssignedToBug {
assigned_user {
url,
username,
id
},
id
}
fragment Fd on ActivitiesGroupAssignedToBug {
assigned_group {
name,
id
},
id
}
fragment Fe on ActivitiesExternalUserInvited {
email,
id
}
fragment Ff on ActivitiesExternalUserInvitationCancelled {
email,
id
}
fragment Fg on ActivitiesExternalUserRemoved {
removed_user {
id
},
id
}
fragment Fh on ActivitiesUserBannedFromProgram {
removed_user {
id
},
id
}
fragment Fi on ActivitiesBountyAwarded {
bounty_amount,
bounty_currency,
bonus_amount,
report {
reporter {
username,
url,
id
},
id
},
id
}
fragment Fj on ActivitiesBountySuggested {
bounty_amount,
bounty_currency,
bonus_amount,
id
}
fragment Fk on ActivitiesBugResolved {
report {
reporter {
username,
url,
id
},
id
},
id
}
fragment Fl on ActivitiesSwagAwarded {
report {
reporter {
username,
url,
id
},
id
},
id
}
fragment Fm on ActivitiesChangedScope {
old_scope {
asset_identifier,
id
},
new_scope {
asset_identifier,
id
},
id
}
fragment Fn on ActivityInterface {
_id,
internal,
i_can_edit,
__typename,
message,
markdown_message,
created_at,
updated_at,
actor {
__typename,
...F5,
...F6,
...F3
},
attachments {
_id,
file_name,
content_type,
expiring_url,
id
},
...F7,
...F8,
...F9,
...Fa,
...Fb,
...Fc,
...Fd,
...Fe,
...Ff,
...Fg,
...Fh,
...Fi,
...Fj,
...Fk,
...Fl,
...Fm,
...F3
}
fragment Fo on User {
username,
url,
__typename,
id
}
fragment Fp on TeamMemberGroup {
name,
__typename,
id
}
fragment Fq on Report {
_id,
url,
title,
state,
substate,
created_at,
assignee {
__typename,
...Fo,
...Fp,
...F3
},
cloned_from {
_id,
id
},
reporter {
username,
url,
id
},
team {
_id,
url,
handle,
name,
twitter_handle,
website,
about,
offers_bounties,
id
},
id
}
fragment Fr on Report {
state,
stage,
disclosed_at,
cve_ids,
singular_disclosure_disabled,
disclosed_at,
bug_reporter_agreed_on_going_public_at,
team_member_agreed_on_going_public_at,
comments_closed,
mediation_requested_at,
vulnerability_information,
vulnerability_information_html,
reporter {
disabled,
username,
url,
_profile_picture2g6hJa:profile_picture(size:$size_4),
id
},
weakness {
id,
name
},
original_report {
id,
url
},
attachments {
_id,
file_name,
expiring_url,
content_type,
id
},
allow_singular_disclosure_at,
allow_singular_disclosure_after,
singular_disclosure_allowed,
severity {
rating,
score,
author_type,
id
},
structured_scope {
_id,
asset_type,
asset_identifier,
max_severity,
id
},
_activities4z6spP:activities(first:$first_6,not_types:$not_types_7,order_by:$order_by_8) {
edges {
node {
__typename,
...F4,
...Fn,
...F3
},
cursor
},
pageInfo {
hasNextPage,
hasPreviousPage
}
},
id,
...Fq
}
fragment Fs on Report {
id,
...Fr
}
fragment Ft on Report {
title,
id
}
fragment Fu on Report {
_id,
pre_submission_review_state,
i_can_anc_review,
reporter {
username,
id
},
team {
handle,
id
},
id,
...F2
}
fragment Fv on Report {
team {
policy_html,
id
},
structured_scope {
asset_identifier,
asset_type,
instruction,
id
},
id
}
fragment Fw on Report {
weakness {
name,
id
},
id
}
fragment Fx on Report {
severity {
rating,
score,
id
},
id
}
fragment Fy on Report {
latest_activity_at,
created_at,
id,
...Fq
}
fragment Fz on Query {
me {
username,
_teamsWbVmT:teams(order_by:$order_by_b,first:$first_c) {
edges {
node {
name,
handle,
id
},
cursor
},
pageInfo {
hasNextPage,
hasPreviousPage
}
},
id
},
id
}
fragment FA on Query {
_reports1t04lE:reports(page:$first_0,first:$first_a,limit:$first_a,order_by:$order_by_1,substate:$substate_2,pre_submission_review_states:$pre_submission_review_states_3) {
total_count,
edges {
node {
_id,
id,
...Fy
},
cursor
},
pageInfo {
hasNextPage,
hasPreviousPage
}
},
id,
...Fz
}
fragment FB on Query {
id,
...Fz
}
fragment FC on Query {
id
}
fragment FD on Query {
me {
username,
_profile_pictureihzmG:profile_picture(size:$size_9),
id
},
id,
...FC
}
fragment FE on Query {
_reports3QQXft:reports(first:$first_0,order_by:$order_by_1,substate:$substate_2,pre_submission_review_states:$pre_submission_review_states_3) {
edges {
node {
id,
...F1,
...F2,
...Fs,
...Ft,
...Fu,
...Fv,
...Fw,
...Fx
},
cursor
},
pageInfo {
hasNextPage,
hasPreviousPage
}
},
id,
...FA,
...FB,
...FD
}

File diff suppressed because it is too large Load Diff

700
vendor/gems/graphql/benchmark/run.rb vendored Normal file
View File

@ -0,0 +1,700 @@
# frozen_string_literal: true
require "graphql"
ADD_WARDEN = false
require "jazz"
require "benchmark/ips"
require "stackprof"
require "memory_profiler"
require "graphql/batch"
require "securerandom"

# Benchmark / profiling harness for graphql-ruby, driven by the
# `bench:*` rake tasks in the Rakefile.
module GraphQLBenchmark
  # Fixture schemas and queries shared by the tasks below.
  QUERY_STRING = GraphQL::Introspection::INTROSPECTION_QUERY
  DOCUMENT = GraphQL.parse(QUERY_STRING)
  SCHEMA = Jazz::Schema

  BENCHMARK_PATH = File.expand_path("../", __FILE__)
  CARD_SCHEMA = GraphQL::Schema.from_definition(File.read(File.join(BENCHMARK_PATH, "schema.graphql")))
  ABSTRACT_FRAGMENTS = GraphQL.parse(File.read(File.join(BENCHMARK_PATH, "abstract_fragments.graphql")))
  ABSTRACT_FRAGMENTS_2_QUERY_STRING = File.read(File.join(BENCHMARK_PATH, "abstract_fragments_2.graphql"))
  ABSTRACT_FRAGMENTS_2 = GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING)

  BIG_SCHEMA = GraphQL::Schema.from_definition(File.join(BENCHMARK_PATH, "big_schema.graphql"))
  BIG_QUERY_STRING = File.read(File.join(BENCHMARK_PATH, "big_query.graphql"))
  BIG_QUERY = GraphQL.parse(BIG_QUERY_STRING)

  # A query repeating one field 5000 times, to stress fields-will-merge
  # validation.
  FIELDS_WILL_MERGE_SCHEMA = GraphQL::Schema.from_definition("type Query { hello: String }")
  FIELDS_WILL_MERGE_QUERY = GraphQL.parse("{ #{Array.new(5000, "hello").join(" ")} }")

  module_function
def self.run(task)
Benchmark.ips do |x|
case task
when "query"
x.report("query") { SCHEMA.execute(document: DOCUMENT) }
when "validate"
x.report("validate - introspection ") { CARD_SCHEMA.validate(DOCUMENT) }
x.report("validate - abstract fragments") { CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS) }
x.report("validate - abstract fragments 2") { CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS_2) }
x.report("validate - big query") { BIG_SCHEMA.validate(BIG_QUERY) }
x.report("validate - fields will merge") { FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY) }
when "scan"
require "graphql/c_parser"
x.report("scan c - introspection") { GraphQL.scan_with_c(QUERY_STRING) }
x.report("scan - introspection") { GraphQL.scan_with_ruby(QUERY_STRING) }
x.report("scan c - fragments") { GraphQL.scan_with_c(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
x.report("scan - fragments") { GraphQL.scan_with_ruby(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
x.report("scan c - big query") { GraphQL.scan_with_c(BIG_QUERY_STRING) }
x.report("scan - big query") { GraphQL.scan_with_ruby(BIG_QUERY_STRING) }
when "parse"
# Uncomment this to use the C parser:
# require "graphql/c_parser"
x.report("parse - introspection") { GraphQL.parse(QUERY_STRING) }
x.report("parse - fragments") { GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
x.report("parse - big query") { GraphQL.parse(BIG_QUERY_STRING) }
else
raise("Unexpected task #{task}")
end
end
end
def self.profile_parse
# To profile the C parser instead:
# require "graphql/c_parser"
report = MemoryProfiler.report do
GraphQL.parse(BIG_QUERY_STRING)
GraphQL.parse(QUERY_STRING)
GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING)
end
report.pretty_print
end
def self.validate_memory
FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY)
report = MemoryProfiler.report do
FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY)
nil
end
report.pretty_print
end
def self.profile
# Warm up any caches:
SCHEMA.execute(document: DOCUMENT)
# CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS)
res = nil
result = StackProf.run(mode: :wall) do
# CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS)
res = SCHEMA.execute(document: DOCUMENT)
end
StackProf::Report.new(result).print_text
end
def self.build_large_schema
Class.new(GraphQL::Schema) do
query_t = Class.new(GraphQL::Schema::Object) do
graphql_name("Query")
int_ts = 5.times.map do |i|
int_t = Module.new do
include GraphQL::Schema::Interface
graphql_name "Interface#{i}"
5.times do |n2|
field :"field#{n2}", String do
argument :arg, String
end
end
end
field :"int_field_#{i}", int_t
int_t
end
obj_ts = 100.times.map do |n|
input_obj_t = Class.new(GraphQL::Schema::InputObject) do
graphql_name("Input#{n}")
argument :arg, String
end
obj_t = Class.new(GraphQL::Schema::Object) do
graphql_name("Object#{n}")
implements(*int_ts)
20.times do |n2|
field :"field#{n2}", String do
argument :input, input_obj_t
end
end
field :self_field, self
field :int_0_field, int_ts[0]
end
field :"rootfield#{n}", obj_t
obj_t
end
10.times do |n|
union_t = Class.new(GraphQL::Schema::Union) do
graphql_name "Union#{n}"
possible_types(*obj_ts.sample(10))
end
field :"unionfield#{n}", union_t
end
end
query(query_t)
end
end
def self.profile_boot
Benchmark.ips do |x|
x.config(time: 10)
x.report("Booting large schema") {
build_large_schema
}
end
result = StackProf.run(mode: :wall, interval: 1) do
build_large_schema
end
StackProf::Report.new(result).print_text
retained_schema = nil
report = MemoryProfiler.report do
retained_schema = build_large_schema
end
report.pretty_print
end
SILLY_LARGE_SCHEMA = build_large_schema
def self.profile_small_query_on_large_schema
schema = Class.new(SILLY_LARGE_SCHEMA)
Benchmark.ips do |x|
x.report("Run small query") {
schema.execute("{ __typename }")
}
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.execute("{ __typename }")
end
StackProf::Report.new(result).print_text
StackProf.run(mode: :wall, out: "tmp/small_query.dump", interval: 1) do
schema.execute("{ __typename }")
end
report = MemoryProfiler.report do
schema.execute("{ __typename }")
end
puts "\n\n"
report.pretty_print
end
def self.profile_large_introspection
schema = SILLY_LARGE_SCHEMA
Benchmark.ips do |x|
x.config(time: 10)
x.report("Run large introspection") {
schema.to_json
}
end
result = StackProf.run(mode: :wall) do
schema.to_json
end
StackProf::Report.new(result).print_text
retained_schema = nil
report = MemoryProfiler.report do
schema.to_json
end
puts "\n\n"
report.pretty_print
end
def self.profile_large_analysis
query_str = "query {\n".dup
5.times do |n|
query_str << " intField#{n} { "
20.times do |o|
query_str << "...Obj#{o}Fields "
end
query_str << "}\n"
end
query_str << "}"
20.times do |o|
query_str << "fragment Obj#{o}Fields on Object#{o} { "
20.times do |f|
query_str << " field#{f}(arg: \"a\")\n"
end
query_str << " selfField { selfField { selfField { __typename } } }\n"
# query_str << " int0Field { ...Int0Fields }"
query_str << "}\n"
end
# query_str << "fragment Int0Fields on Interface0 { __typename }"
query = GraphQL::Query.new(SILLY_LARGE_SCHEMA, query_str)
analyzers = [
GraphQL::Analysis::AST::FieldUsage,
GraphQL::Analysis::AST::QueryDepth,
GraphQL::Analysis::AST::QueryComplexity
]
Benchmark.ips do |x|
x.report("Running introspection") {
GraphQL::Analysis::AST.analyze_query(query, analyzers)
}
end
StackProf.run(mode: :wall, out: "last-stackprof.dump", interval: 1) do
GraphQL::Analysis::AST.analyze_query(query, analyzers)
end
result = StackProf.run(mode: :wall, interval: 1) do
GraphQL::Analysis::AST.analyze_query(query, analyzers)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
GraphQL::Analysis::AST.analyze_query(query, analyzers)
end
puts "\n\n"
report.pretty_print
end
# Adapted from https://github.com/rmosolgo/graphql-ruby/issues/861
def self.profile_large_result
schema = ProfileLargeResult::Schema
document = ProfileLargeResult::ALL_FIELDS
Benchmark.ips do |x|
x.config(time: 10)
x.report("Querying for #{ProfileLargeResult::DATA.size} objects") {
schema.execute(document: document)
}
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.execute(document: document)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
schema.execute(document: document)
end
report.pretty_print
end
def self.profile_small_result
schema = ProfileLargeResult::Schema
document = GraphQL.parse <<-GRAPHQL
query {
foos(first: 5) {
__typename
id
int1
int2
string1
string2
foos(first: 5) {
__typename
string1
string2
foo {
__typename
int1
}
}
}
}
GRAPHQL
Benchmark.ips do |x|
x.config(time: 10)
x.report("Querying for #{ProfileLargeResult::DATA.size} objects") {
schema.execute(document: document)
}
end
StackProf.run(mode: :wall, interval: 1, out: "tmp/small.dump") do
schema.execute(document: document)
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.execute(document: document)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
schema.execute(document: document)
end
report.pretty_print
end
def self.profile_small_introspection
schema = ProfileLargeResult::Schema
document = GraphQL.parse(GraphQL::Introspection::INTROSPECTION_QUERY)
Benchmark.ips do |x|
x.config(time: 5)
x.report("Introspection") {
schema.execute(document: document)
}
end
result = StackProf.run(mode: :wall, interval: 1) do
schema.execute(document: document)
end
StackProf::Report.new(result).print_text
report = MemoryProfiler.report do
schema.execute(document: document)
end
report.pretty_print
end
# Fixture schema for the profiling helpers: a Query root returning a large
# list of pre-generated Foo objects, optionally wrapped in Procs so that
# lazy resolution (`lazy_resolve Proc, :call`) can be exercised too.
module ProfileLargeResult
  # Wraps `value` in a Proc unless the EAGER env var is set, letting the
  # same dataset drive both eager and lazy execution benchmarks.
  def self.eager_or_proc(value)
    ENV["EAGER"] ? value : -> { value }
  end

  DATA_SIZE = 1000
  DATA = DATA_SIZE.times.map {
    eager_or_proc({
      id: SecureRandom.uuid,
      int1: SecureRandom.random_number(100000),
      int2: SecureRandom.random_number(100000),
      string1: eager_or_proc(SecureRandom.base64),
      string2: SecureRandom.base64,
      # Bug fix: `random_number(1)` always returns 0, which made every
      # boolean field `true`. `random_number(2)` yields 0 or 1, so the
      # booleans now actually vary as intended.
      boolean1: SecureRandom.random_number(2) == 0,
      boolean2: SecureRandom.random_number(2) == 0,
      int_array: eager_or_proc(10.times.map { eager_or_proc(SecureRandom.random_number(100000)) }),
      string_array: 10.times.map { SecureRandom.base64 },
      boolean_array: 10.times.map { SecureRandom.random_number(2) == 0 },
    })
  }

  # Interface exposing the string array field.
  module Bar
    include GraphQL::Schema::Interface
    field :string_array, [String], null: false
  end

  # Interface layered on Bar; adds the int and boolean arrays.
  module Baz
    include GraphQL::Schema::Interface
    implements Bar
    field :int_array, [Integer], null: false
    field :boolean_array, [Boolean], null: false
  end

  # No-op extension: adds per-field extension overhead to the benchmark.
  class ExampleExtension < GraphQL::Schema::FieldExtension
  end

  class FooType < GraphQL::Schema::Object
    implements Baz
    field :id, ID, null: false, extensions: [ExampleExtension]
    field :int1, Integer, null: false, extensions: [ExampleExtension]
    field :int2, Integer, null: false, extensions: [ExampleExtension]
    # The unused arg1..arg4 arguments exist to add argument-handling
    # overhead during execution.
    field :string1, String, null: false do
      argument :arg1, String, required: false
      argument :arg2, String, required: false
      argument :arg3, String, required: false
      argument :arg4, String, required: false
    end
    field :string2, String, null: false do
      argument :arg1, String, required: false
      argument :arg2, String, required: false
      argument :arg3, String, required: false
      argument :arg4, String, required: false
    end
    field :boolean1, Boolean, null: false do
      argument :arg1, String, required: false
      argument :arg2, String, required: false
      argument :arg3, String, required: false
      argument :arg4, String, required: false
    end
    field :boolean2, Boolean, null: false do
      argument :arg1, String, required: false
      argument :arg2, String, required: false
      argument :arg3, String, required: false
      argument :arg4, String, required: false
    end
    field :foos, [FooType], null: false, description: "Return a list of Foo objects" do
      argument :first, Integer, default_value: DATA_SIZE
    end

    # Returns the first `first` fixture objects.
    def foos(first:)
      DATA.first(first)
    end

    field :foo, FooType

    # Returns one random fixture object.
    def foo
      DATA.sample
    end
  end

  class QueryType < GraphQL::Schema::Object
    description "Query root of the system"
    field :foos, [FooType], null: false, description: "Return a list of Foo objects" do
      argument :first, Integer, default_value: DATA_SIZE
    end

    # Returns the first `first` fixture objects.
    def foos(first:)
      DATA.first(first)
    end
  end

  class Schema < GraphQL::Schema
    query QueryType
    # use GraphQL::Dataloader
    lazy_resolve Proc, :call
  end

  # Pre-parsed query selecting every Foo field; `$skip` exercises the
  # @skip directive path.
  ALL_FIELDS = GraphQL.parse <<-GRAPHQL
    query($skip: Boolean = false) {
      foos {
        id @skip(if: $skip)
        int1
        int2
        string1
        string2
        boolean1
        boolean2
        stringArray
        intArray
        booleanArray
      }
    }
  GRAPHQL
end
# Profile IDL printing (`Schema.to_definition`) for the large fixture
# schema: one warm-up call, then benchmark-ips throughput, a stackprof
# wall-time profile, and a memory_profiler allocation report.
def self.profile_to_definition
  require_relative "./batch_loading"
  target = ProfileLargeResult::Schema
  target.to_definition # warm up before measuring

  Benchmark.ips do |bench|
    bench.report("to_definition") { target.to_definition }
  end

  wall_profile = StackProf.run(mode: :wall, interval: 1) { target.to_definition }
  StackProf::Report.new(wall_profile).print_text

  MemoryProfiler.report { target.to_definition }.pretty_print
end
# Profile building a schema from an IDL string
# (`GraphQL::Schema.from_definition`): benchmark-ips throughput, a
# stackprof wall-time profile, and a memory_profiler allocation report.
def self.profile_from_definition
  # require "graphql/c_parser"
  idl_string = SILLY_LARGE_SCHEMA.to_definition
  build_schema = -> { GraphQL::Schema.from_definition(idl_string) }

  Benchmark.ips do |bench|
    bench.report("from_definition") { build_schema.call }
  end

  wall_profile = StackProf.run(mode: :wall, interval: 1) { build_schema.call }
  StackProf::Report.new(wall_profile).print_text

  MemoryProfiler.report { build_schema.call }.pretty_print
end
# Compare GraphQL::Batch, GraphQL::Dataloader, and no batching on the same
# deeply nested query: first sanity-check that all three schemas return
# identical data, then benchmark throughput and print a memory report for
# each strategy.
def self.profile_batch_loaders
  require_relative "./batch_loading"
  include BatchLoading

  document = GraphQL.parse <<-GRAPHQL
    {
      braves: team(name: "Braves") { ...TeamFields }
      bulls: team(name: "Bulls") { ...TeamFields }
    }
    fragment TeamFields on Team {
      players {
        team {
          players {
            team {
              name
            }
          }
        }
      }
    }
  GRAPHQL

  batch_result = GraphQLBatchSchema.execute(document: document).to_h
  dataloader_result = GraphQLDataloaderSchema.execute(document: document).to_h
  no_batch_result = GraphQLNoBatchingSchema.execute(document: document).to_h

  # All three strategies must agree before any timing is meaningful.
  unless [batch_result, dataloader_result, no_batch_result].uniq.size == 1
    puts "Batch result:"
    pp batch_result
    puts "Dataloader result:"
    pp dataloader_result
    puts "No-batch result:"
    pp no_batch_result
    raise "Got different results -- fix implementation before benchmarking."
  end

  Benchmark.ips do |bench|
    bench.report("GraphQL::Batch") { GraphQLBatchSchema.execute(document: document) }
    bench.report("GraphQL::Dataloader") { GraphQLDataloaderSchema.execute(document: document) }
    bench.report("No Batching") { GraphQLNoBatchingSchema.execute(document: document) }
    bench.compare!
  end

  puts "========== GraphQL-Batch Memory =============="
  MemoryProfiler.report { GraphQLBatchSchema.execute(document: document) }.pretty_print

  puts "========== Dataloader Memory ================="
  MemoryProfiler.report { GraphQLDataloaderSchema.execute(document: document) }.pretty_print

  puts "========== No Batch Memory =============="
  MemoryProfiler.report { GraphQLNoBatchingSchema.execute(document: document) }.pretty_print
end
# Measure the memory footprint of defining a schema with 100 object types
# and 100 Relay classic mutations, then print the allocation report.
# The schema classes are built inside the MemoryProfiler block so all of
# their allocations are captured.
def self.profile_schema_memory_footprint
  schema = nil
  report = MemoryProfiler.report do
    query_type = Class.new(GraphQL::Schema::Object) do
      graphql_name "Query"
      100.times do |n|
        object_type = Class.new(GraphQL::Schema::Object) do
          graphql_name "Object#{n}"
          field :f, Integer
        end
        field "f#{n}", object_type
      end
    end

    thing_type = Class.new(GraphQL::Schema::Object) do
      graphql_name "Thing"
      field :name, String
    end

    mutation_type = Class.new(GraphQL::Schema::Object) do
      graphql_name "Mutation"
      100.times do |n|
        mutation_class = Class.new(GraphQL::Schema::RelayClassicMutation) do
          graphql_name "Do#{n}"
          argument :id, "ID"
          field :thing, thing_type
          field :things, thing_type.connection_type
        end
        field "f#{n}", mutation: mutation_class
      end
    end

    schema = Class.new(GraphQL::Schema) do
      query(query_type)
      mutation(mutation_type)
    end
  end
  report.pretty_print
end
# Schema used to compare Ruby stack depth during eager vs. lazy field
# resolution (Procs are registered as lazy values via `lazy_resolve`).
class StackDepthSchema < GraphQL::Schema
  class Thing < GraphQL::Schema::Object
    field :thing, self do
      argument :lazy, Boolean, default_value: false
    end

    # Returns a nested Thing placeholder, wrapped in a Proc when `lazy:`
    # so resolution is deferred.
    def thing(lazy:)
      lazy ? -> { :something } : :something
    end

    field :stack_trace_depth, Integer do
      argument :lazy, Boolean, default_value: false
    end

    # Counts how many graphql-ruby frames are on the call stack at the
    # moment of resolution (or at lazy-resolution time when `lazy:`).
    def stack_trace_depth(lazy:)
      count_graphql_frames = -> { caller.count { |frame| frame.include?("graphql") } }
      lazy ? count_graphql_frames : count_graphql_frames.call
    end
  end

  class Query < GraphQL::Schema::Object
    field :thing, Thing

    def thing
      :something
    end
  end

  query(Query)
  lazy_resolve(Proc, :call)
end
# Shows how lazy resolution affects Ruby stack depth: runs the same
# five-level nested query eagerly, with lazy fields, and with a
# lazily-measured depth count, then prints the number of graphql-ruby
# frames observed in each mode (see StackDepthSchema above this method).
def self.profile_stack_depth
  query_str = <<-GRAPHQL
    query($lazyThing: Boolean!, $lazyStackTrace: Boolean!) {
      thing {
        thing(lazy: $lazyThing) {
          thing(lazy: $lazyThing) {
            thing(lazy: $lazyThing) {
              thing(lazy: $lazyThing) {
                stackTraceDepth(lazy: $lazyStackTrace)
              }
            }
          }
        }
      }
    }
  GRAPHQL
  # Eager: every field resolved inline while the parent frames are live.
  eager_res = StackDepthSchema.execute(query_str, variables: { lazyThing: false, lazyStackTrace: false })
  # Lazy fields, eager depth measurement.
  lazy_res = StackDepthSchema.execute(query_str, variables: { lazyThing: true, lazyStackTrace: false })
  # Lazy fields AND lazy measurement: depth sampled during lazy resolution.
  very_lazy_res = StackDepthSchema.execute(query_str, variables: { lazyThing: true, lazyStackTrace: true })
  # Dig through the five nested `thing` levels to the measured depth.
  get_depth = ->(result) { result["data"]["thing"]["thing"]["thing"]["thing"]["thing"]["stackTraceDepth"] }
  puts <<~RESULT
    Result Depth
    ---------------------
    Eager #{get_depth.call(eager_res)}
    Lazy #{get_depth.call(lazy_res)}
    Very Lazy #{get_depth.call(very_lazy_res)}
  RESULT
end
end

View File

@ -0,0 +1,118 @@
# A big schema for testing
# Domain: trading-card data (cards, printings, expansions, artists,
# players, decks), with comment threads attached to most object types.

type Query {
  node(id: ID!): Node
}

# Refetch interface with a non-null ID.
interface Node {
  id: ID!
}

# Variant of Node with a nullable ID (exercises overlapping interfaces).
interface Node2 {
  id: ID
}

# Anything that can carry a comment thread.
interface Commentable {
  id: ID!
  comments: [Comment!]!
}

interface Named {
  name: String!
}

type Comment implements Node {
  author: Player
  body: String!
  id: ID!
}

type Card implements Node, Commentable, Node2, Named {
  name: String!
  converted_mana_cost: Int!
  mana_cost: String!
  colors: [Color!]!
  power: Int
  toughness: Int
  rules_text: String!
  id: ID!
  comments: [Comment!]!
}

# One appearance of a Card within an Expansion.
type Printing implements Node, Commentable, Node2 {
  card: Card!
  expansion: Expansion!
  rarity: Rarity!
  artist: Artist!
  id: ID!
  comments: [Comment!]!
}

type Expansion implements Node, Commentable, Named {
  name: String!
  code: String!
  printings: [Printing!]!
  block: Block!
  id: ID!
  comments: [Comment!]!
}

# A group of related Expansions.
type Block implements Node, Commentable, Named {
  name: String!
  expansions: [Expansion!]!
  id: ID!
  comments: [Comment!]!
}

# Eg shard, guild, clan
type Watermark implements Node, Commentable, Named {
  name: String!
  cards: [Card!]!
  colors: [Color!]!
  id: ID!
  comments: [Comment!]!
}

type Artist implements Node, Commentable, Named {
  name: String!
  printings: [Printing!]!
  id: ID!
  comments: [Comment!]!
}

type Player implements Node, Commentable, Named {
  name: String!
  decks: [Deck!]!
  id: ID!
  comments: [Comment!]!
}

type Deck implements Node, Commentable, Named {
  name: String!
  colors: [Color!]!
  slots: [Slot!]!
  id: ID!
  comments: [Comment!]!
}

# One card position within a Deck.
type Slot implements Node, Commentable {
  deck: Deck!
  card: Card!
  id: ID!
  comments: [Comment!]!
}

enum Color {
  WHITE
  BLUE
  BLACK
  RED
  GREEN
  COLORLESS
}

enum Rarity {
  COMMON
  UNCOMMON
  RARE
  MYTHIC_RARE
  TIMESHIFTED
}

View File

@ -0,0 +1,44 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
  module Development
    # Flags calls to context-dependent schema-reflection methods
    # (`get_field`, `fields`, `arguments`, `types`, `enum_values`,
    # `enum.values`, ...) made without a query context, unless the
    # receiver is already query-specific (`query`, `context`, `warden`,
    # an `@query`/`@context` ivar, etc.).
    class ContextIsPassedCop < RuboCop::Cop::Base
      MSG = <<-MSG
      This method also accepts `context` as an argument. Pass it so that the returned value will reflect the current query, or use another method that isn't context-dependent.
      MSG

      # These are already context-aware or else not query-related
      def_node_matcher :likely_query_specific_receiver?, "
        {
          (send _ {:ast_node :query :context :warden :ctx :query_ctx :query_context})
          (lvar {:ast_node :query :context :warden :ctx :query_ctx :query_context})
          (ivar {:@query :@context :@warden})
          (send _ {:introspection_system})
        }
      "

      # One-argument lookups that also accept `context` as a second argument.
      def_node_matcher :method_doesnt_receive_second_context_argument?, <<-MATCHER
        (send _ {:get_field :get_argument :get_type} _)
      MATCHER

      # Zero-argument reflection calls that also accept `context` as their
      # first argument.
      def_node_matcher :method_doesnt_receive_first_context_argument?, <<-MATCHER
        (send _ {:fields :arguments :types :enum_values})
      MATCHER

      # A bare `enum.values` / `enum_type.values` call with no arguments.
      def_node_matcher :is_enum_values_call_without_arguments?, "
        (send (send _ {:enum :enum_type (ivar {:@enum :@enum_type})}) {:values})
      "

      # Report any matching call whose receiver isn't obviously
      # query-specific; `node.to_a[0]` is the receiver child of the send.
      def on_send(node)
        if (
          method_doesnt_receive_second_context_argument?(node) ||
          method_doesnt_receive_first_context_argument?(node) ||
          is_enum_values_call_without_arguments?(node)
        ) && !likely_query_specific_receiver?(node.to_a[0])
          add_offense(node)
        end
      end
    end
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
  module Development
    # Forbids the string-accepting `*_eval` reflection methods in favor of
    # their block-only `*_exec` counterparts, which cannot evaluate
    # arbitrary strings of code.
    class NoEvalCop < RuboCop::Cop::Base
      MSG_TEMPLATE = "Don't use `%{eval_method_name}` which accepts strings and may result evaluating unexpected code. Use `%{exec_method_name}` instead, and pass a block."

      EVAL_METHODS = [:module_eval, :class_eval, :instance_eval].freeze

      def on_send(node)
        eval_name = node.method_name
        return unless EVAL_METHODS.include?(eval_name)

        # e.g. :class_eval -> :class_exec
        exec_name = eval_name.to_s.sub("eval", "exec").to_sym
        add_offense node, message: MSG_TEMPLATE % { eval_method_name: eval_name, exec_method_name: exec_name }
      end
    end
  end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
  module Development
    # Make sure no tests are focused, from https://github.com/rubocop-hq/rubocop/issues/3773#issuecomment-420662102
    class NoFocusCop < RuboCop::Cop::Base
      MSG = 'Remove `focus` from tests.'

      # Matches a receiver-less call to `focus`.
      def_node_matcher :focused?, <<-MATCHER
        (send nil? :focus)
      MATCHER

      def on_send(node)
        add_offense(node) if focused?(node)
      end
    end
  end
end

View File

@ -0,0 +1,47 @@
# frozen_string_literal: true
require 'rubocop'
module Cop
  module Development
    # A custom Rubocop rule to catch uses of `.none?` or `.any?` without
    # a block (or arguments).
    #
    # @see https://github.com/rmosolgo/graphql-ruby/pull/2090
    class NoneWithoutBlockCop < RuboCop::Cop::Base
      MSG = <<-MD
      Instead of `.none?` or `.any?` without a block:
      - Use `.empty?` to check for an empty collection (faster)
      - Add a block to explicitly check for `false` (more clear)
      Run `-a` to replace this with `%{bang}.empty?`.
      MD

      # A call that receives a block can't be a bare `.none?`/`.any?`,
      # so mark its send node ignored before `on_send` sees it.
      def on_block(node)
        ignore_node(node.send_node)
      end

      # Flag block-less, argument-less `.none?` / `.any?` calls.
      def on_send(node)
        if !ignored_node?(node) && (node.method_name == :none? || node.method_name == :any?) && node.arguments.empty?
          add_offense(node, message: MSG % { bang: node.method_name == :none? ? "" : "!.." })
        end
      end

      # Replace `.none?` with `.empty?`; rewrite `.any?` (plus any chained
      # calls on top of it) as `!receiver.empty?...`.
      def autocorrect(node)
        lambda do |corrector|
          if node.method_name == :none?
            corrector.replace(node.location.selector, "empty?")
          else
            # Walk up to the outermost chained send so the `!` lands in
            # front of the whole expression. (The original loop tested
            # `node.parent` but never reassigned `node`, so it spun
            # forever whenever the parent was a send node, and raised
            # NoMethodError when there was no parent at all.)
            full_exp = node
            full_exp = full_exp.parent while full_exp.parent&.send_type?
            new_source = "!" + full_exp.source_range.source.sub("any?", "empty?")
            corrector.replace(full_exp, new_source)
          end
        end
      end
    end
  end
end

Some files were not shown because too many files have changed in this diff Show More