diff --git a/.rubocop_todo/rspec/timecop_freeze.yml b/.rubocop_todo/rspec/timecop_freeze.yml
index 2967793e42a..2e59ce7bdda 100644
--- a/.rubocop_todo/rspec/timecop_freeze.yml
+++ b/.rubocop_todo/rspec/timecop_freeze.yml
@@ -1,24 +1,4 @@
---
RSpec/TimecopFreeze:
Exclude:
- - ee/spec/models/merge_train_spec.rb
- - ee/spec/support/shared_contexts/lib/gitlab/insights/reducers/reducers_shared_contexts.rb
- qa/spec/support/repeater_spec.rb
- - spec/features/users/active_sessions_spec.rb
- - spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
- - spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
- - spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
- - spec/lib/gitlab/auth/unique_ips_limiter_spec.rb
- - spec/lib/gitlab/checks/timed_logger_spec.rb
- - spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
- - spec/lib/gitlab/puma_logging/json_formatter_spec.rb
- - spec/lib/json_web_token/hmac_token_spec.rb
- - spec/models/active_session_spec.rb
- - spec/serializers/entity_date_helper_spec.rb
- - spec/support/cycle_analytics_helpers/test_generation.rb
- - spec/support/helpers/cycle_analytics_helpers.rb
- - spec/support/helpers/javascript_fixtures_helpers.rb
- - spec/support/shared_contexts/rack_attack_shared_context.rb
- - spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
- - spec/workers/concerns/reenqueuer_spec.rb
- - spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
diff --git a/app/assets/javascripts/boards/components/issue_board_filtered_search.vue b/app/assets/javascripts/boards/components/issue_board_filtered_search.vue
index 605e11d1590..e2055325b7a 100644
--- a/app/assets/javascripts/boards/components/issue_board_filtered_search.vue
+++ b/app/assets/javascripts/boards/components/issue_board_filtered_search.vue
@@ -12,8 +12,8 @@ import { TYPE_USER } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import { __ } from '~/locale';
import {
- OPERATOR_IS_AND_IS_NOT,
- OPERATOR_IS_ONLY,
+ OPERATORS_IS_NOT,
+ OPERATORS_IS,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
TOKEN_TITLE_CONFIDENTIAL,
@@ -71,7 +71,7 @@ export default {
icon: 'user',
title: TOKEN_TITLE_ASSIGNEE,
type: TOKEN_TYPE_ASSIGNEE,
- operators: OPERATOR_IS_AND_IS_NOT,
+ operators: OPERATORS_IS_NOT,
token: AuthorToken,
unique: true,
fetchAuthors,
@@ -81,7 +81,7 @@ export default {
icon: 'pencil',
title: TOKEN_TITLE_AUTHOR,
type: TOKEN_TYPE_AUTHOR,
- operators: OPERATOR_IS_AND_IS_NOT,
+ operators: OPERATORS_IS_NOT,
symbol: '@',
token: AuthorToken,
unique: true,
@@ -92,7 +92,7 @@ export default {
icon: 'labels',
title: TOKEN_TITLE_LABEL,
type: TOKEN_TYPE_LABEL,
- operators: OPERATOR_IS_AND_IS_NOT,
+ operators: OPERATORS_IS_NOT,
token: LabelToken,
unique: false,
symbol: '~',
@@ -128,7 +128,7 @@ export default {
title: TOKEN_TITLE_CONFIDENTIAL,
unique: true,
token: GlFilteredSearchToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
options: [
{ icon: 'eye-slash', value: 'yes', title: __('Yes') },
{ icon: 'eye', value: 'no', title: __('No') },
diff --git a/app/assets/javascripts/ci/runner/components/search_tokens/paused_token_config.js b/app/assets/javascripts/ci/runner/components/search_tokens/paused_token_config.js
index 97ee8ec3eef..71a145dd4a3 100644
--- a/app/assets/javascripts/ci/runner/components/search_tokens/paused_token_config.js
+++ b/app/assets/javascripts/ci/runner/components/search_tokens/paused_token_config.js
@@ -1,5 +1,5 @@
import { __ } from '~/locale';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
import { PARAM_KEY_PAUSED, I18N_PAUSED } from '../../constants';
@@ -24,5 +24,5 @@ export const pausedTokenConfig = {
// see: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1438
title: title.replace(/\s/g, '\u00a0'),
})),
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
};
diff --git a/app/assets/javascripts/ci/runner/components/search_tokens/status_token_config.js b/app/assets/javascripts/ci/runner/components/search_tokens/status_token_config.js
index 117a630719e..4bc32909777 100644
--- a/app/assets/javascripts/ci/runner/components/search_tokens/status_token_config.js
+++ b/app/assets/javascripts/ci/runner/components/search_tokens/status_token_config.js
@@ -1,5 +1,5 @@
import {
- OPERATOR_IS_ONLY,
+ OPERATORS_IS,
TOKEN_TITLE_STATUS,
} from '~/vue_shared/components/filtered_search_bar/constants';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
@@ -38,5 +38,5 @@ export const statusTokenConfig = {
// see: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1438
title: title.replace(/\s/g, '\u00a0'),
})),
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
};
diff --git a/app/assets/javascripts/ci/runner/components/search_tokens/tag_token_config.js b/app/assets/javascripts/ci/runner/components/search_tokens/tag_token_config.js
index fdeba714385..369b214f952 100644
--- a/app/assets/javascripts/ci/runner/components/search_tokens/tag_token_config.js
+++ b/app/assets/javascripts/ci/runner/components/search_tokens/tag_token_config.js
@@ -1,5 +1,5 @@
import { s__ } from '~/locale';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { PARAM_KEY_TAG } from '../../constants';
import TagToken from './tag_token.vue';
@@ -8,5 +8,5 @@ export const tagTokenConfig = {
title: s__('Runners|Tags'),
type: PARAM_KEY_TAG,
token: TagToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
};
diff --git a/app/assets/javascripts/cycle_analytics/components/filter_bar.vue b/app/assets/javascripts/cycle_analytics/components/filter_bar.vue
index 0ad325a8523..bcd846940ae 100644
--- a/app/assets/javascripts/cycle_analytics/components/filter_bar.vue
+++ b/app/assets/javascripts/cycle_analytics/components/filter_bar.vue
@@ -1,7 +1,7 @@
-
+
!FILTER_NONE_ANY.includes(suggestion.value),
);
diff --git a/app/assets/javascripts/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue b/app/assets/javascripts/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue
index 867222279b2..63f5ddd0069 100644
--- a/app/assets/javascripts/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue
+++ b/app/assets/javascripts/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue
@@ -11,7 +11,7 @@ import Api from '~/api';
import { updateHistory, setUrlParams } from '~/lib/utils/url_utility';
import Tracking from '~/tracking';
import {
- OPERATOR_IS_ONLY,
+ OPERATORS_IS,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
} from '~/vue_shared/components/filtered_search_bar/constants';
@@ -119,7 +119,7 @@ export default {
unique: true,
symbol: '@',
token: AuthorToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchPath: this.projectPath,
fetchAuthors: Api.projectUsers.bind(Api),
},
@@ -130,7 +130,7 @@ export default {
unique: true,
symbol: '@',
token: AuthorToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchPath: this.projectPath,
fetchAuthors: Api.projectUsers.bind(Api),
},
diff --git a/app/models/project.rb b/app/models/project.rb
index 096c78dc73c..a07d4147228 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -2736,11 +2736,6 @@ class Project < ApplicationRecord
ci_config_path.blank? || ci_config_path == Gitlab::FileDetector::PATTERNS[:gitlab_ci]
end
- # DO NOT USE. This method will be deprecated soon
- def uses_external_project_ci_config?
- !!(ci_config_path =~ %r{@.+/.+})
- end
-
def limited_protected_branches(limit)
protected_branches.limit(limit)
end
@@ -2861,11 +2856,6 @@ class Project < ApplicationRecord
repository.gitlab_ci_yml_for(sha, ci_config_path_or_default)
end
- # DO NOT USE. This method will be deprecated soon
- def ci_config_external_project
- Project.find_by_full_path(ci_config_path.split('@', 2).last)
- end
-
def enabled_group_deploy_keys
return GroupDeployKey.none unless group
diff --git a/config/feature_flags/development/jira_connect_oauth_self_managed_setting.yml b/config/feature_flags/development/jira_connect_oauth_self_managed_setting.yml
index 05232d0f80a..4cbbd3a46d9 100644
--- a/config/feature_flags/development/jira_connect_oauth_self_managed_setting.yml
+++ b/config/feature_flags/development/jira_connect_oauth_self_managed_setting.yml
@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/377679
milestone: '15.6'
type: development
group: group::integrations
-default_enabled: false
+default_enabled: true
diff --git a/doc/administration/lfs/index.md b/doc/administration/lfs/index.md
index 8fdc98bd12a..31ca71598c7 100644
--- a/doc/administration/lfs/index.md
+++ b/doc/administration/lfs/index.md
@@ -332,6 +332,28 @@ To check an installed Git LFS client's version, run this command:
git lfs version
```
+## Error viewing a PDF file
+
+When LFS has been configured with object storage and `proxy_download` set to
+`false`, [you may see an error when previewing a PDF file from a web browser](https://gitlab.com/gitlab-org/gitlab/-/issues/248100):
+
+```plaintext
+An error occurred while loading the file. Please try again later.
+```
+
+This occurs due to Cross-Origin Resource Sharing (CORS) restrictions:
+the browser attempts to load the PDF from object storage, but the object
+storage provider rejects the request because the GitLab domain differs
+from the object storage domain.
+
+To fix this issue, configure your object storage provider's CORS
+settings to allow the GitLab domain. See the following documentation
+for more details:
+
+1. [AWS S3](https://aws.amazon.com/premiumsupport/knowledge-center/s3-configure-cors/)
+1. [Google Cloud Storage](https://cloud.google.com/storage/docs/configuring-cors)
+1. [Azure Storage](https://learn.microsoft.com/en-us/rest/api/storageservices/cross-origin-resource-sharing--cors--support-for-the-azure-storage-services)
+
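+A minimal sketch of such a policy for AWS S3, applied with the AWS CLI (the bucket
+name and allowed origin below are placeholders for your own values):
+
+```shell
+aws s3api put-bucket-cors --bucket <lfs-objects-bucket> --cors-configuration '{
+  "CORSRules": [
+    {
+      "AllowedOrigins": ["https://gitlab.example.com"],
+      "AllowedMethods": ["GET", "HEAD"],
+      "AllowedHeaders": ["*"],
+      "MaxAgeSeconds": 3600
+    }
+  ]
+}'
+```
+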
## Known limitations
- Only compatible with the Git LFS client versions 1.1.0 and later, or 1.0.2.
diff --git a/doc/administration/object_storage.md b/doc/administration/object_storage.md
index d2c9c35148c..4cc6a1b86a5 100644
--- a/doc/administration/object_storage.md
+++ b/doc/administration/object_storage.md
@@ -638,6 +638,16 @@ if this access is not in place include:
Received status code 403 from server: Forbidden
```
+- Object storage buckets need to allow Cross-Origin Resource Sharing
+ (CORS) access from the URL of the GitLab instance. Attempting to load
+ a PDF in the repository page may show the following error:
+
+ ```plaintext
+ An error occurred while loading the file. Please try again later.
+ ```
+
+ See [the LFS documentation](lfs/index.md#error-viewing-a-pdf-file) for more details.
+
Getting a `403 Forbidden` response is specifically called out on the
[package repository documentation](packages/index.md#using-object-storage)
as a side effect of how some build tools work.
diff --git a/doc/administration/postgresql/replication_and_failover.md b/doc/administration/postgresql/replication_and_failover.md
index ee90b120d05..34974046620 100644
--- a/doc/administration/postgresql/replication_and_failover.md
+++ b/doc/administration/postgresql/replication_and_failover.md
@@ -1081,6 +1081,190 @@ Reverting the PostgreSQL upgrade with `gitlab-ctl revert-pg-upgrade` has the sam
`gitlab-ctl pg-upgrade`. You should follow the same procedure by first stopping the replicas,
then reverting the leader, and finally reverting the replicas.
+### Near-zero downtime upgrade of PostgreSQL in a Patroni cluster (Experimental)
+
+Patroni enables you to run a major PostgreSQL upgrade without shutting down the cluster. However, this
+requires additional resources to host the new Patroni nodes with the upgraded PostgreSQL. In practice, with this
+procedure, you are:
+
+- Creating a new Patroni cluster with a new version of PostgreSQL.
+- Migrating the data from the existing cluster.
+
+This procedure is non-invasive and does not impact your existing cluster before you switch it off.
+However, it can be both time- and resource-consuming, so weigh these costs against your availability requirements.
+
+The steps, in order:
+
+1. [Provision resources for the new cluster](#provision-resources-for-the-new-cluster).
+1. [Preflight check](#preflight-check).
+1. [Configure the leader of the new cluster](#configure-the-leader-of-the-new-cluster).
+1. [Start publisher on the existing leader](#start-publisher-on-the-existing-leader).
+1. [Copy the data from the existing cluster](#copy-the-data-from-the-existing-cluster).
+1. [Replicate data from the existing cluster](#replicate-data-from-the-existing-cluster).
+1. [Grow the new cluster](#grow-the-new-cluster).
+1. [Switch the application to use the new cluster](#switch-the-application-to-use-the-new-cluster).
+1. [Clean up](#clean-up).
+
+#### Provision resources for the new cluster
+
+You need a new set of resources for Patroni nodes. The new Patroni cluster does not require exactly the same number
+of nodes as the existing cluster. You may choose a different number of nodes based on your requirements. The new
+cluster uses the existing Consul cluster (with a different `patroni['scope']`) and PgBouncer nodes.
+
+Make sure that at least the leader node of the existing cluster is accessible from the nodes of the new
+cluster.
+
+#### Preflight check
+
+This procedure relies on PostgreSQL [logical replication](https://www.postgresql.org/docs/current/logical-replication.html)
+to support near-zero downtime upgrades of Patroni clusters. All of the
+[logical replication requirements](https://www.postgresql.org/docs/current/logical-replication-restrictions.html)
+must be met. In particular, `wal_level` must be `logical`. To check the `wal_level`,
+run the following command with `gitlab-psql` on any node of the existing cluster:
+
+```sql
+SHOW wal_level;
+```
+
+By default, Patroni sets `wal_level` to `replica`. You must increase it to `logical`.
+Changing `wal_level` requires restarting PostgreSQL, so this step leads to a short
+downtime (hence near-zero downtime). To do this on the Patroni **leader** node:
+
+1. Edit `gitlab.rb` by setting:
+
+ ```ruby
+ patroni['postgresql']['wal_level'] = 'logical'
+ ```
+
+1. Run `gitlab-ctl reconfigure`. This writes the configuration but does not restart the PostgreSQL service.
+1. Run `gitlab-ctl patroni restart` to restart PostgreSQL and apply the new `wal_level` without triggering
+   failover. For the duration of the restart cycle, the cluster leader is unavailable.
+1. Verify the change by running `SHOW wal_level` with `gitlab-psql`.
+
+#### Configure the leader of the new cluster
+
+Configure the first node of the new cluster. It becomes the leader of the new cluster.
+You can use the configuration of the existing cluster, if it is compatible with the new
+PostgreSQL version. Refer to the documentation on [configuring Patroni clusters](#configuring-patroni-cluster).
+
+In addition to the common configuration, you must apply the following settings in `gitlab.rb`:
+
+1. Make sure that the new Patroni cluster uses a different scope. The scope is used to namespace the Patroni settings
+ in Consul, making it possible to use the same Consul cluster for the existing and the new clusters.
+
+ ```ruby
+ patroni['scope'] = 'postgresql_new-ha'
+ ```
+
+1. Make sure that Consul agents don't mix PostgreSQL services offered by the existing and the new Patroni
+ clusters. For this purpose, you must use an internal attribute that is currently undocumented:
+
+ ```ruby
+ consul['internal']['postgresql_service_name'] = 'postgresql_new'
+ ```
+
+#### Start publisher on the existing leader
+
+On the existing leader, run this SQL statement with `gitlab-psql` to start a logical replication publisher:
+
+```sql
+CREATE PUBLICATION patroni_upgrade FOR ALL TABLES;
+```
+
+#### Copy the data from the existing cluster
+
+To dump the current database from the existing cluster, run these commands on the
+**leader** of the new cluster:
+
+1. Optional. Copy global database objects:
+
+ ```shell
+ pg_dumpall -h ${EXISTING_CLUSTER_LEADER} -U gitlab-psql -g | gitlab-psql
+ ```
+
+ You can ignore the errors about existing database objects, such as roles. They are
+ created when the node is configured for the first time.
+
+1. Copy the current database:
+
+ ```shell
+ pg_dump -h ${EXISTING_CLUSTER_LEADER} -U gitlab-psql -d gitlabhq_production -s | gitlab-psql
+ ```
+
+ Depending on the size of your database, this command may take a while to complete.
+
+The `pg_dump` and `pg_dumpall` commands are in `/opt/gitlab/embedded/bin`. In these commands,
+`EXISTING_CLUSTER_LEADER` is the host address of the leader node of the existing cluster.
+
+NOTE:
+The `gitlab-psql` user must be able to authenticate with the existing leader from the new leader node.
+
+#### Replicate data from the existing cluster
+
+After taking the initial data dump, you must keep the new leader in sync with the
+latest changes of your existing cluster. On the new leader, run this SQL statement
+with `gitlab-psql` to subscribe to the publication on the existing leader:
+
+```sql
+CREATE SUBSCRIPTION patroni_upgrade
+ CONNECTION 'host=EXISTING_CLUSTER_LEADER dbname=gitlabhq_production user=gitlab-psql'
+ PUBLICATION patroni_upgrade;
+```
+
+In this statement, `EXISTING_CLUSTER_LEADER` is the host address of the leader node
+of the existing cluster. You can also use
+[other parameters](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS)
+to change the connection string. For example, you can pass the authentication password.
+
+To check the status of replication, run these queries:
+
+- `SELECT * FROM pg_replication_slots WHERE slot_name = 'patroni_upgrade'` on the existing leader (the publisher).
+- `SELECT * FROM pg_stat_subscription` on the new leader (the subscriber).
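+
+For example, a sketch of running the first query non-interactively on the existing
+leader with `gitlab-psql`, selecting a few useful columns:
+
+```shell
+gitlab-psql -c "SELECT slot_name, active, confirmed_flush_lsn FROM pg_replication_slots WHERE slot_name = 'patroni_upgrade';"
+```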
+
+#### Grow the new cluster
+
+Configure the other nodes of the new cluster in the same way you
+[configured the leader](#configure-the-leader-of-the-new-cluster).
+Make sure that you use the same `patroni['scope']` and
+`consul['internal']['postgresql_service_name']`.
+
+What happens here:
+
+- The application still uses the existing leader as its database backend.
+- Logical replication keeps the new leader in sync with the existing leader.
+- When other nodes are added to the new cluster, Patroni handles
+  the replication to these nodes.
+
+It is a good idea to wait until the replica nodes of the new cluster are initialized and have caught
+up with the leader.
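+
+For example, you can inspect the state and replication lag of the members of the new
+cluster by running the following on any of its nodes:
+
+```shell
+sudo gitlab-ctl patroni members
+```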
+
+#### Switch the application to use the new cluster
+
+Up to this point, you can stop the upgrade procedure without losing data on the
+existing cluster. When you switch the database backend of the application and point
+it to the new cluster, the old cluster does not receive new updates. It falls behind
+the new cluster. After this point, any recovery must be done from the nodes of the new cluster.
+
+To do the switch on **all** PgBouncer nodes:
+
+1. Edit `gitlab.rb` by setting:
+
+ ```ruby
+ consul['watchers'] = %w(postgresql_new)
+ consul['internal']['postgresql_service_name'] = 'postgresql_new'
+ ```
+
+1. Run `gitlab-ctl reconfigure`.
+1. Run `rm /var/opt/gitlab/consul/watcher_postgresql.json`. This step is a workaround for a
+   [known issue](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/7293).
+
+#### Clean up
+
+After completing these steps, you can clean up the resources of the old Patroni cluster.
+They are no longer needed. However, before removing the resources, remove the
+logical replication subscription on the new leader by running `DROP SUBSCRIPTION patroni_upgrade`
+with `gitlab-psql`.
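+
+For example, on the new leader:
+
+```shell
+gitlab-psql -c "DROP SUBSCRIPTION patroni_upgrade;"
+```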
+
## Troubleshooting
### Consul and PostgreSQL changes not taking effect
diff --git a/doc/api/merge_trains.md b/doc/api/merge_trains.md
index 111cf5255d6..e8912aac759 100644
--- a/doc/api/merge_trains.md
+++ b/doc/api/merge_trains.md
@@ -154,3 +154,69 @@ Example response:
}
]
```
+
+## Get the status of a merge request on a merge train
+
+Get merge train information for the requested merge request.
+
+```plaintext
+GET /projects/:id/merge_trains/merge_requests/:merge_request_iid
+```
+
+Supported attributes:
+
+| Attribute | Type | Required | Description |
+| ------------------- | -------------- | -------- | ------------------------------------------------------------------------------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding). |
+| `merge_request_iid` | integer | yes | The internal ID of the merge request. |
+
+Example request:
+
+```shell
+curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/597/merge_trains/merge_requests/1"
+```
+
+Example response:
+
+```json
+{
+ "id": 267,
+ "merge_request": {
+ "id": 273,
+ "iid": 1,
+ "project_id": 597,
+ "title": "My title 9",
+ "description": null,
+ "state": "opened",
+ "created_at": "2022-10-31T19:06:05.725Z",
+ "updated_at": "2022-10-31T19:06:05.725Z",
+ "web_url": "http://localhost/namespace18/project21/-/merge_requests/1"
+ },
+ "user": {
+ "id": 933,
+ "username": "user12",
+ "name": "Sidney Jones31",
+ "state": "active",
+ "avatar_url": "https://www.gravatar.com/avatar/6c8365de387cb3db10ecc7b1880203c4?s=80\u0026d=identicon",
+ "web_url": "http://localhost/user12"
+ },
+ "pipeline": {
+ "id": 273,
+ "iid": 1,
+ "project_id": 598,
+ "sha": "b83d6e391c22777fca1ed3012fce84f633d7fed0",
+ "ref": "main",
+ "status": "pending",
+ "source": "push",
+ "created_at": "2022-10-31T19:06:06.231Z",
+ "updated_at": "2022-10-31T19:06:06.231Z",
+ "web_url": "http://localhost/namespace19/project22/-/pipelines/273"
+ },
+ "created_at": "2022-10-31T19:06:06.237Z",
+ "updated_at":"2022-10-31T19:06:06.237Z",
+ "target_branch":"main",
+ "status":"idle",
+ "merged_at":null,
+ "duration":null
+}
+```
diff --git a/doc/api/settings.md b/doc/api/settings.md
index 78dc81c4f84..3a3cd50911f 100644
--- a/doc/api/settings.md
+++ b/doc/api/settings.md
@@ -104,7 +104,9 @@ Example response:
"floc_enabled": false,
"external_pipeline_validation_service_timeout": null,
"external_pipeline_validation_service_token": null,
- "external_pipeline_validation_service_url": null
+ "external_pipeline_validation_service_url": null,
+ "jira_connect_application_key": null,
+ "jira_connect_proxy_url": null
}
```
@@ -218,7 +220,9 @@ Example response:
"external_pipeline_validation_service_timeout": null,
"external_pipeline_validation_service_token": null,
"external_pipeline_validation_service_url": null,
- "can_create_group": false
+ "can_create_group": false,
+ "jira_connect_application_key": "123",
+ "jira_connect_proxy_url": "http://gitlab.example.com"
}
```
@@ -505,6 +509,8 @@ listed in the descriptions of the relevant settings.
| `whats_new_variant` | string | no | What's new variant, possible values: `all_tiers`, `current_tier`, and `disabled`. |
| `web_ide_clientside_preview_enabled` | boolean | no | Live Preview (allow live previews of JavaScript projects in the Web IDE using CodeSandbox Live Preview). |
| `wiki_page_max_content_bytes` | integer | no | Maximum wiki page content size in **bytes**. Default: 52428800 Bytes (50 MB). The minimum value is 1024 bytes. |
+| `jira_connect_application_key` | string | no | Application ID of the OAuth application that should be used to authenticate with the GitLab.com for Jira Cloud app. |
+| `jira_connect_proxy_url` | string | no | URL of the GitLab instance that should be used as a proxy for the GitLab.com for Jira Cloud app. |
### Package Registry: Package file size limits
diff --git a/doc/development/documentation/index.md b/doc/development/documentation/index.md
index d52db71b633..327bd6044d2 100644
--- a/doc/development/documentation/index.md
+++ b/doc/development/documentation/index.md
@@ -159,26 +159,17 @@ You can use a Rake task to update the `CODEOWNERS` file.
To update the `CODEOWNERS` file:
-1. Open a merge request to update
- [the Rake task](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/tasks/gitlab/tw/codeowners.rake)
- with the latest [TW team assignments](https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments).
-1. Assign the merge request to a backend maintainer for review and merge.
-1. After the MR is merged, go to the root of the `gitlab` repository.
-1. Run the Rake task and save the output in a file:
-
- ```shell
- bundle exec rake tw:codeowners > ~/Desktop/updates.md
- ```
-
-1. Open the file (for example, `~/Desktop/updates.md`) and copy everything
- except the errors at the bottom of the file.
-1. Open the [`CODEOWNERS`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/CODEOWNERS)
- file and paste the lines into the `^[Documentation Pages]` section.
-
- WARNING:
- The documentation section is not the last section of the `CODEOWNERS` file. Don't
- delete data that isn't ours!
-
+1. Review the [TW team assignments](https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments)
+ in the [`codeowners.rake`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/tasks/gitlab/tw/codeowners.rake)
+ file. If any assignments have changed:
+ 1. Update the `codeowners.rake` file with the changes.
+ 1. Assign the merge request to a technical writing manager for review and merge.
+1. After the changes to `codeowners.rake` are merged, go to the root of the `gitlab` repository.
+1. Run the Rake task with this command: `bundle exec rake tw:codeowners`
+1. Review the command output for any pages whose metadata needs attention.
+   Handle any needed changes in a separate merge request.
+1. Add the changes to the `CODEOWNERS` file to Git: `git add .gitlab/CODEOWNERS`
+1. Commit your changes to your branch, and push your branch to `origin`. (A condensed
+   sketch of the command-line steps follows this list.)
1. Create a merge request and assign it to a technical writing manager for review.
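+
+A condensed sketch of the command-line steps above, assuming your working branch is
+already checked out (the commit message and branch name are placeholders):
+
+```shell
+bundle exec rake tw:codeowners
+git add .gitlab/CODEOWNERS
+git commit -m "Update CODEOWNERS for documentation pages"
+git push origin <your-branch>
+```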
## Move, rename, or delete a page
diff --git a/doc/development/documentation/styleguide/word_list.md b/doc/development/documentation/styleguide/word_list.md
index d28972a644b..40827984902 100644
--- a/doc/development/documentation/styleguide/word_list.md
+++ b/doc/development/documentation/styleguide/word_list.md
@@ -1064,6 +1064,21 @@ Instead of **and/or**, use **or** or re-write the sentence. This rule also appli
Do not use **slave**. Another option is **secondary**. ([Vale](../testing.md#vale) rule: [`InclusionCultural.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/InclusionCultural.yml))
+## subscription tier
+
+Do not confuse **subscription** or **subscription tier** with **[license](#license)**.
+A user purchases a **subscription**. That subscription has a **tier**.
+
+To describe tiers:
+
+| Instead of | Use |
+|---------------------------------|----------------------------------------|
+| In the Free tier or greater | In any tier |
+| In the Free tier or higher | In any tier |
+| In the Premium tier or greater | In the Premium or Ultimate tier |
+| In the Premium tier or higher | In the Premium or Ultimate tier |
+| In the Premium tier or lower | In the Free or Premium tier |
+
## subgroup
Use **subgroup** (no hyphen) instead of **sub-group**. ([Vale](../testing.md#vale) rule: [`SubstitutionSuggestions.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/SubstitutionSuggestions.yml))
diff --git a/lefthook.yml b/lefthook.yml
index dc2c0b70c6f..cfe68b5c390 100644
--- a/lefthook.yml
+++ b/lefthook.yml
@@ -17,7 +17,7 @@ pre-push:
tags: view haml style
files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD
glob: '*.html.haml'
- run: REVEAL_RUBOCOP_TODO=0 bundle exec haml-lint --config .haml-lint.yml {files}
+ run: REVEAL_RUBOCOP_TODO=0 bundle exec haml-lint --parallel --config .haml-lint.yml {files}
markdownlint:
tags: documentation style
files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD
diff --git a/lib/api/settings.rb b/lib/api/settings.rb
index 26b7e58bc7a..6ed4e6da3f3 100644
--- a/lib/api/settings.rb
+++ b/lib/api/settings.rb
@@ -184,6 +184,8 @@ module API
optional :group_runner_token_expiration_interval, type: Integer, desc: 'Token expiration interval for group runners, in seconds'
optional :project_runner_token_expiration_interval, type: Integer, desc: 'Token expiration interval for project runners, in seconds'
optional :pipeline_limit_per_project_user_sha, type: Integer, desc: "Maximum number of pipeline creation requests allowed per minute per user and commit. Set to 0 for unlimited requests per minute."
+ optional :jira_connect_application_key, type: String, desc: "Application ID of the OAuth application that should be used to authenticate with the GitLab.com for Jira Cloud app"
+ optional :jira_connect_proxy_url, type: String, desc: "URL of the GitLab instance that should be used as a proxy for the GitLab.com for Jira Cloud app"
Gitlab::SSHPublicKey.supported_types.each do |type|
optional :"#{type}_key_restriction",
diff --git a/lib/banzai/filter/syntax_highlight_filter.rb b/lib/banzai/filter/syntax_highlight_filter.rb
index 766715d9e39..489b4d21300 100644
--- a/lib/banzai/filter/syntax_highlight_filter.rb
+++ b/lib/banzai/filter/syntax_highlight_filter.rb
@@ -9,7 +9,7 @@ module Banzai
module Filter
# HTML Filter to highlight fenced code blocks
#
- class SyntaxHighlightFilter < HTML::Pipeline::Filter
+ class SyntaxHighlightFilter < TimeoutHtmlPipelineFilter
include OutputSafety
LANG_PARAMS_DELIMITER = ':'
@@ -19,7 +19,7 @@ module Banzai
CSS = 'pre:not([data-kroki-style]) > code:only-child'
XPATH = Gitlab::Utils::Nokogiri.css_to_xpath(CSS).freeze
- def call
+ def call_with_timeout
doc.xpath(XPATH).each do |node|
highlight_node(node)
end
diff --git a/lib/banzai/filter/timeout_html_pipeline_filter.rb b/lib/banzai/filter/timeout_html_pipeline_filter.rb
new file mode 100644
index 00000000000..e6057fd8e37
--- /dev/null
+++ b/lib/banzai/filter/timeout_html_pipeline_filter.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Banzai
+ module Filter
+ # HTML Filter that wraps a filter in a Gitlab::RenderTimeout.
+ # This way partial results can be returned, and the entire pipeline
+ # is not killed.
+ #
+ # This should not be used for any filter that must be allowed to complete,
+ # like a `ReferenceRedactorFilter`
+ #
+ class TimeoutHtmlPipelineFilter < HTML::Pipeline::Filter
+ RENDER_TIMEOUT = 2.seconds
+
+ def call
+ if Feature.enabled?(:markup_rendering_timeout, context[:project])
+ Gitlab::RenderTimeout.timeout(foreground: RENDER_TIMEOUT) { call_with_timeout }
+ else
+ call_with_timeout
+ end
+ rescue Timeout::Error => e
+ class_name = self.class.name.demodulize
+ timeout_counter.increment(source: class_name)
+ Gitlab::ErrorTracking.track_exception(e, project_id: context[:project]&.id, class_name: class_name)
+
+ # we've timed out, but some work may have already been completed,
+ # so go ahead and return the document
+ doc
+ end
+
+ def call_with_timeout
+ raise NotImplementedError
+ end
+
+ private
+
+ def timeout_counter
+ Gitlab::Metrics.counter(:banzai_filter_timeouts_total, 'Count of the Banzai filters that time out')
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/cross_repo_comparer.rb b/lib/gitlab/git/cross_repo.rb
similarity index 83%
rename from lib/gitlab/git/cross_repo_comparer.rb
rename to lib/gitlab/git/cross_repo.rb
index d42b2a3bd98..d44657e7db1 100644
--- a/lib/gitlab/git/cross_repo_comparer.rb
+++ b/lib/gitlab/git/cross_repo.rb
@@ -2,7 +2,7 @@
module Gitlab
module Git
- class CrossRepoComparer
+ class CrossRepo
attr_reader :source_repo, :target_repo
def initialize(source_repo, target_repo)
@@ -10,15 +10,8 @@ module Gitlab
@target_repo = target_repo
end
- def compare(source_ref, target_ref, straight:)
- ensuring_ref_in_source(target_ref) do |target_commit_id|
- Gitlab::Git::Compare.new(
- source_repo,
- target_commit_id,
- source_ref,
- straight: straight
- )
- end
+ def execute(target_ref, &blk)
+ ensuring_ref_in_source(target_ref, &blk)
end
private
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index b8f4ff0e9c4..2d13d3378c2 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -823,9 +823,14 @@ module Gitlab
end
def compare_source_branch(target_branch_name, source_repository, source_branch_name, straight:)
- CrossRepoComparer
- .new(source_repository, self)
- .compare(source_branch_name, target_branch_name, straight: straight)
+ CrossRepo.new(source_repository, self).execute(target_branch_name) do |target_commit_id|
+ Gitlab::Git::Compare.new(
+ source_repository,
+ target_commit_id,
+ source_branch_name,
+ straight: straight
+ )
+ end
end
def write_ref(ref_path, ref, old_ref: nil)
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index 0d5daeefe90..81ccf3d8589 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -48,6 +48,9 @@ module Gitlab
attrs[:GitConfigOptions] << "receive.maxInputSize=#{receive_max_input_size.megabytes}"
end
+ remote_ip = Gitlab::ApplicationContext.current_context_attribute(:remote_ip)
+ attrs[:RemoteIP] = remote_ip if remote_ip.present?
+
attrs
end
diff --git a/lib/tasks/gitlab/usage_data.rake b/lib/tasks/gitlab/usage_data.rake
index 159b70cd673..32db5e2dff6 100644
--- a/lib/tasks/gitlab/usage_data.rake
+++ b/lib/tasks/gitlab/usage_data.rake
@@ -51,9 +51,12 @@ namespace :gitlab do
desc 'GitLab | UsageDataMetrics | Generate raw SQL metrics queries for RSpec'
task generate_sql_metrics_queries: :environment do
+ require 'active_support/testing/time_helpers'
+ include ActiveSupport::Testing::TimeHelpers
+
path = Rails.root.join('tmp', 'test')
- queries = Timecop.freeze(2021, 1, 1) do
+ queries = travel_to(Time.utc(2021, 1, 1)) do
Gitlab::Usage::ServicePingReport.for(output: :metrics_queries)
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 64f787b32f3..f4db6328d08 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -4551,6 +4551,9 @@ msgstr ""
msgid "An error occurred. Please try again."
msgstr ""
+msgid "An error occurred. Unable to reopen this merge request."
+msgstr ""
+
msgid "An example project for managing Kubernetes clusters integrated with GitLab"
msgstr ""
@@ -33684,6 +33687,9 @@ msgid_plural "Refreshing in %d seconds to show the updated status..."
msgstr[0] ""
msgstr[1] ""
+msgid "Refreshing..."
+msgstr ""
+
msgid "Regenerate export"
msgstr ""
@@ -34220,6 +34226,9 @@ msgstr ""
msgid "Reopened this %{quick_action_target}."
msgstr ""
+msgid "Reopening..."
+msgstr ""
+
msgid "Reopens this %{quick_action_target}."
msgstr ""
diff --git a/spec/features/users/active_sessions_spec.rb b/spec/features/users/active_sessions_spec.rb
index e2ee78a7cc5..1605073acda 100644
--- a/spec/features/users/active_sessions_spec.rb
+++ b/spec/features/users/active_sessions_spec.rb
@@ -4,25 +4,29 @@ require 'spec_helper'
RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions do
it 'successful login adds a new active user login' do
+ user = create(:user)
+
now = Time.zone.parse('2018-03-12 09:06')
- Timecop.freeze(now) do
- user = create(:user)
+ travel_to(now) do
gitlab_sign_in(user)
expect(page).to have_current_path root_path, ignore_query: true
sessions = ActiveSession.list(user)
expect(sessions.count).to eq 1
+ gitlab_sign_out
+ end
- # refresh the current page updates the updated_at
- Timecop.freeze(now + 1.minute) do
- visit current_path
+ # refresh the current page updates the updated_at
+ travel_to(now + 1.minute) do
+ gitlab_sign_in(user)
- sessions = ActiveSession.list(user)
- expect(sessions.first).to have_attributes(
- created_at: Time.zone.parse('2018-03-12 09:06'),
- updated_at: Time.zone.parse('2018-03-12 09:07')
- )
- end
+ visit current_path
+
+ sessions = ActiveSession.list(user)
+ expect(sessions.first).to have_attributes(
+ created_at: Time.zone.parse('2018-03-12 09:06'),
+ updated_at: Time.zone.parse('2018-03-12 09:07')
+ )
end
end
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 3c26fa97338..0cdab747a8d 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -2,8 +2,8 @@ import { GlFilteredSearchToken } from '@gitlab/ui';
import { keyBy } from 'lodash';
import { ListType } from '~/boards/constants';
import {
- OPERATOR_IS_AND_IS_NOT,
- OPERATOR_IS_ONLY,
+ OPERATORS_IS_NOT,
+ OPERATORS_IS,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
TOKEN_TITLE_LABEL,
@@ -747,7 +747,7 @@ export const mockConfidentialToken = {
title: 'Confidential',
unique: true,
token: GlFilteredSearchToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
options: [
{ icon: 'eye-slash', value: 'yes', title: 'Yes' },
{ icon: 'eye', value: 'no', title: 'No' },
@@ -759,7 +759,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, isSignedI
icon: 'user',
title: TOKEN_TITLE_ASSIGNEE,
type: TOKEN_TYPE_ASSIGNEE,
- operators: OPERATOR_IS_AND_IS_NOT,
+ operators: OPERATORS_IS_NOT,
token: AuthorToken,
unique: true,
fetchAuthors,
@@ -769,7 +769,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, isSignedI
icon: 'pencil',
title: TOKEN_TITLE_AUTHOR,
type: TOKEN_TYPE_AUTHOR,
- operators: OPERATOR_IS_AND_IS_NOT,
+ operators: OPERATORS_IS_NOT,
symbol: '@',
token: AuthorToken,
unique: true,
@@ -780,7 +780,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, isSignedI
icon: 'labels',
title: TOKEN_TITLE_LABEL,
type: TOKEN_TYPE_LABEL,
- operators: OPERATOR_IS_AND_IS_NOT,
+ operators: OPERATORS_IS_NOT,
token: LabelToken,
unique: false,
symbol: '~',
diff --git a/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js b/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
index d3c7ea50f9d..3dce5a509ca 100644
--- a/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
+++ b/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
@@ -7,7 +7,7 @@ import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import TagToken, { TAG_SUGGESTIONS_PATH } from '~/ci/runner/components/search_tokens/tag_token.vue';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { getRecentlyUsedSuggestions } from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
jest.mock('~/flash');
@@ -42,7 +42,7 @@ const mockTagTokenConfig = {
type: 'tag',
token: TagToken,
recentSuggestionsStorageKey: mockStorageKey,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
};
describe('TagToken', () => {
diff --git a/spec/frontend/fixtures/freeze_period.rb b/spec/frontend/fixtures/freeze_period.rb
index 5aa466ef015..a1c7564d36e 100644
--- a/spec/frontend/fixtures/freeze_period.rb
+++ b/spec/frontend/fixtures/freeze_period.rb
@@ -13,15 +13,6 @@ RSpec.describe 'Freeze Periods (JavaScript fixtures)' do
remove_repository(project)
end
- around do |example|
- freeze_time do
- # Mock time to sept 19 (intl. talk like a pirate day)
- travel_to(Time.utc(2020, 9, 19))
-
- example.run
- end
- end
-
describe API::FreezePeriods, '(JavaScript fixtures)', type: :request do
include ApiHelpers
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index 62fcbf7aad0..72d3fe93745 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -1,7 +1,7 @@
import {
FILTERED_SEARCH_TERM,
OPERATOR_IS,
- OPERATOR_IS_NOT,
+ OPERATOR_NOT,
OPERATOR_OR,
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
@@ -184,41 +184,41 @@ export const locationSearchWithSpecialValues = [
export const filteredTokens = [
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'homer', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_AUTHOR, value: { data: 'marge', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_AUTHOR, value: { data: 'marge', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'bart', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lisa', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: '5', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'patty', operator: OPERATOR_IS_NOT } },
- { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'selma', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'patty', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'selma', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'carl', operator: OPERATOR_OR } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lenny', operator: OPERATOR_OR } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 3', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 4', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_MILESTONE, value: { data: 'season 20', operator: OPERATOR_IS_NOT } },
- { type: TOKEN_TYPE_MILESTONE, value: { data: 'season 30', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_MILESTONE, value: { data: 'season 20', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_MILESTONE, value: { data: 'season 30', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'cartoon', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'tv', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_LABEL, value: { data: 'live action', operator: OPERATOR_IS_NOT } },
- { type: TOKEN_TYPE_LABEL, value: { data: 'drama', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'live action', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'drama', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v3', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v4', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_RELEASE, value: { data: 'v20', operator: OPERATOR_IS_NOT } },
- { type: TOKEN_TYPE_RELEASE, value: { data: 'v30', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_RELEASE, value: { data: 'v20', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_RELEASE, value: { data: 'v30', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_TYPE, value: { data: 'issue', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_TYPE, value: { data: 'feature', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_TYPE, value: { data: 'bug', operator: OPERATOR_IS_NOT } },
- { type: TOKEN_TYPE_TYPE, value: { data: 'incident', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_TYPE, value: { data: 'bug', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_TYPE, value: { data: 'incident', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_MY_REACTION, value: { data: 'thumbsup', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_MY_REACTION, value: { data: 'thumbsdown', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_MY_REACTION, value: { data: 'thumbsdown', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_CONFIDENTIAL, value: { data: 'yes', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ITERATION, value: { data: '4', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ITERATION, value: { data: '12', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_ITERATION, value: { data: '20', operator: OPERATOR_IS_NOT } },
- { type: TOKEN_TYPE_ITERATION, value: { data: '42', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_ITERATION, value: { data: '20', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_ITERATION, value: { data: '42', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_EPIC, value: { data: '12', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_EPIC, value: { data: '34', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_EPIC, value: { data: '34', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_WEIGHT, value: { data: '1', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_WEIGHT, value: { data: '3', operator: OPERATOR_IS_NOT } },
+ { type: TOKEN_TYPE_WEIGHT, value: { data: '3', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_CONTACT, value: { data: '123', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ORGANIZATION, value: { data: '456', operator: OPERATOR_IS } },
{ type: FILTERED_SEARCH_TERM, value: { data: 'find' } },
diff --git a/spec/frontend/jobs/components/filtered_search/jobs_filtered_search_spec.js b/spec/frontend/jobs/components/filtered_search/jobs_filtered_search_spec.js
index 98bdfc3fcbc..fcdb162dfed 100644
--- a/spec/frontend/jobs/components/filtered_search/jobs_filtered_search_spec.js
+++ b/spec/frontend/jobs/components/filtered_search/jobs_filtered_search_spec.js
@@ -1,6 +1,6 @@
import { GlFilteredSearch } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import JobsFilteredSearch from '~/jobs/components/filtered_search/jobs_filtered_search.vue';
import { mockFailedSearchToken } from '../../mock_data';
@@ -41,7 +41,7 @@ describe('Jobs filtered search', () => {
icon: 'status',
title: 'Status',
unique: true,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
});
});
diff --git a/spec/frontend/packages_and_registries/harbor_registry/pages/details_spec.js b/spec/frontend/packages_and_registries/harbor_registry/pages/details_spec.js
index 8fd50bea280..69765d31674 100644
--- a/spec/frontend/packages_and_registries/harbor_registry/pages/details_spec.js
+++ b/spec/frontend/packages_and_registries/harbor_registry/pages/details_spec.js
@@ -8,7 +8,7 @@ import ArtifactsList from '~/packages_and_registries/harbor_registry/components/
import waitForPromises from 'helpers/wait_for_promises';
import DetailsHeader from '~/packages_and_registries/harbor_registry/components/details/details_header.vue';
import PersistedSearch from '~/packages_and_registries/shared/components/persisted_search.vue';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import {
NAME_SORT_FIELD,
TOKEN_TYPE_TAG_NAME,
@@ -137,7 +137,7 @@ describe('Harbor Details Page', () => {
title: s__('HarborRegistry|Tag'),
unique: true,
token: GlFilteredSearchToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
},
],
});
diff --git a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
index ee3eaaf5ef3..e5ad735bf66 100644
--- a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
@@ -6,7 +6,7 @@ import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
import PipelinesFilteredSearch from '~/pipelines/components/pipelines_list/pipelines_filtered_search.vue';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { TRACKING_CATEGORIES } from '~/pipelines/constants';
import { users, mockSearch, branches, tags } from '../mock_data';
@@ -63,7 +63,7 @@ describe('Pipelines filtered search', () => {
title: 'Trigger author',
unique: true,
projectId: '21',
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
});
expect(findBranchToken()).toMatchObject({
@@ -73,7 +73,7 @@ describe('Pipelines filtered search', () => {
unique: true,
projectId: '21',
defaultBranchName: 'main',
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
});
expect(findSourceToken()).toMatchObject({
@@ -81,7 +81,7 @@ describe('Pipelines filtered search', () => {
icon: 'trigger-source',
title: 'Source',
unique: true,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
});
expect(findStatusToken()).toMatchObject({
@@ -89,7 +89,7 @@ describe('Pipelines filtered search', () => {
icon: 'status',
title: 'Status',
unique: true,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
});
expect(findTagToken()).toMatchObject({
@@ -97,7 +97,7 @@ describe('Pipelines filtered search', () => {
icon: 'tag',
title: 'Tag name',
unique: true,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_closed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_closed_spec.js
index 06ee017dee7..270a37f87e7 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_closed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_closed_spec.js
@@ -1,9 +1,28 @@
-import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { shallowMount, mount } from '@vue/test-utils';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import api from '~/api';
+
+import showGlobalToast from '~/vue_shared/plugins/global_toast';
+
import closedComponent from '~/vue_merge_request_widget/components/states/mr_widget_closed.vue';
import MrWidgetAuthorTime from '~/vue_merge_request_widget/components/mr_widget_author_time.vue';
import StateContainer from '~/vue_merge_request_widget/components/state_container.vue';
+import Actions from '~/vue_merge_request_widget/components/action_buttons.vue';
+
+import { MR_WIDGET_CLOSED_REOPEN_FAILURE } from '~/vue_merge_request_widget/i18n';
+
+jest.mock('~/api', () => ({
+ updateMergeRequest: jest.fn(),
+}));
+jest.mock('~/vue_shared/plugins/global_toast');
+
+useMockLocationHelper();
const MOCK_DATA = {
+ iid: 1,
metrics: {
mergedBy: {},
closedBy: {
@@ -19,22 +38,39 @@ const MOCK_DATA = {
},
targetBranchPath: '/twitter/flight/commits/so_long_jquery',
targetBranch: 'so_long_jquery',
+ targetProjectId: 'twitter/flight',
};
+function createComponent({ shallow = true, props = {} } = {}) {
+ const mounter = shallow ? shallowMount : mount;
+
+ return mounter(closedComponent, {
+ propsData: {
+ mr: MOCK_DATA,
+ ...props,
+ },
+ });
+}
+
+function findActions(wrapper) {
+ return wrapper.findComponent(StateContainer).findComponent(Actions);
+}
+
+function findReopenActionButton(wrapper) {
+ return findActions(wrapper).find('button[data-testid="extension-actions-reopen-button"]');
+}
+
describe('MRWidgetClosed', () => {
let wrapper;
beforeEach(() => {
- wrapper = shallowMount(closedComponent, {
- propsData: {
- mr: MOCK_DATA,
- },
- });
+ wrapper = createComponent();
});
afterEach(() => {
- wrapper.destroy();
- wrapper = null;
+ if (wrapper) {
+ wrapper.destroy();
+ }
});
it('renders closed icon', () => {
@@ -51,4 +87,93 @@ describe('MRWidgetClosed', () => {
dateReadable: MOCK_DATA.metrics.readableClosedAt,
});
});
+
+ describe('actions', () => {
+ describe('reopen', () => {
+ beforeEach(() => {
+ window.gon = { current_user_id: 1 };
+ api.updateMergeRequest.mockResolvedValue(true);
+ wrapper = createComponent({ shallow: false });
+ });
+
+ it('shows the "reopen" button', () => {
+ expect(wrapper.findComponent(StateContainer).props().actions.length).toBe(1);
+ expect(findReopenActionButton(wrapper).text()).toBe('Reopen');
+ });
+
+ it('does not show widget actions when the user is not logged in', () => {
+ window.gon = {};
+
+ wrapper = createComponent();
+
+ expect(findActions(wrapper).exists()).toBe(false);
+ });
+
+ it('makes the reopen request with the correct MR information', async () => {
+ const reopenButton = findReopenActionButton(wrapper);
+
+ reopenButton.trigger('click');
+ await nextTick();
+
+ expect(api.updateMergeRequest).toHaveBeenCalledWith(
+ MOCK_DATA.targetProjectId,
+ MOCK_DATA.iid,
+ { state_event: 'reopen' },
+ );
+ });
+
+ it('shows "Reopening..." while the reopen network request is pending', async () => {
+ const reopenButton = findReopenActionButton(wrapper);
+
+ api.updateMergeRequest.mockReturnValue(new Promise(() => {}));
+
+ reopenButton.trigger('click');
+ await nextTick();
+
+ expect(reopenButton.text()).toBe('Reopening...');
+ });
+
+ it('shows "Refreshing..." when the reopen has succeeded', async () => {
+ const reopenButton = findReopenActionButton(wrapper);
+
+ reopenButton.trigger('click');
+ await waitForPromises();
+
+ expect(reopenButton.text()).toBe('Refreshing...');
+ });
+
+ it('reloads the page when a reopen has succeeded', async () => {
+ const reopenButton = findReopenActionButton(wrapper);
+
+ reopenButton.trigger('click');
+ await waitForPromises();
+
+ expect(window.location.reload).toHaveBeenCalledTimes(1);
+ });
+
+ it('shows "Reopen" when a reopen request has failed', async () => {
+ const reopenButton = findReopenActionButton(wrapper);
+
+ api.updateMergeRequest.mockRejectedValue(false);
+
+ reopenButton.trigger('click');
+ await waitForPromises();
+
+ expect(window.location.reload).not.toHaveBeenCalled();
+ expect(reopenButton.text()).toBe('Reopen');
+ });
+
+ it('requests a toast popup when a reopen request has failed', async () => {
+ const reopenButton = findReopenActionButton(wrapper);
+
+ api.updateMergeRequest.mockRejectedValue(false);
+
+ reopenButton.trigger('click');
+ await waitForPromises();
+
+ expect(showGlobalToast).toHaveBeenCalledTimes(1);
+ expect(showGlobalToast).toHaveBeenCalledWith(MR_WIDGET_CLOSED_REOPEN_FAILURE);
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index a6713b7e7e4..f9cc884f221 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -1,7 +1,7 @@
import { GlFilteredSearchToken } from '@gitlab/ui';
import { mockLabels } from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
import Api from '~/api';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import BranchToken from '~/vue_shared/components/filtered_search_bar/tokens/branch_token.vue';
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
@@ -202,7 +202,7 @@ export const mockBranchToken = {
title: 'Source Branch',
unique: true,
token: BranchToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchBranches: Api.branches.bind(Api),
};
@@ -213,7 +213,7 @@ export const mockAuthorToken = {
unique: false,
symbol: '@',
token: AuthorToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchPath: 'gitlab-org/gitlab-test',
fetchAuthors: Api.projectUsers.bind(Api),
};
@@ -225,7 +225,7 @@ export const mockLabelToken = {
unique: false,
symbol: '~',
token: LabelToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchLabels: () => Promise.resolve(mockLabels),
};
@@ -236,7 +236,7 @@ export const mockMilestoneToken = {
unique: true,
symbol: '%',
token: MilestoneToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchMilestones: () => Promise.resolve({ data: mockMilestones }),
};
@@ -254,7 +254,7 @@ export const mockReactionEmojiToken = {
title: 'My-Reaction',
unique: true,
token: EmojiToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchEmojis: () => Promise.resolve(mockEmojis),
};
@@ -265,7 +265,7 @@ export const mockCrmContactToken = {
token: CrmContactToken,
isProject: false,
fullPath: 'group',
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
unique: true,
};
@@ -276,7 +276,7 @@ export const mockCrmOrganizationToken = {
token: CrmOrganizationToken,
isProject: false,
fullPath: 'group',
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
unique: true,
};
@@ -286,7 +286,7 @@ export const mockMembershipToken = {
title: 'Membership',
token: GlFilteredSearchToken,
unique: true,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
options: [
{ value: 'exclude', title: 'Direct' },
{ value: 'only', title: 'Inherited' },
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index a0126c2bd63..5546b0c7032 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -17,7 +17,7 @@ import {
import {
DEFAULT_NONE_ANY,
OPERATOR_IS,
- OPERATOR_IS_NOT,
+ OPERATOR_NOT,
} from '~/vue_shared/components/filtered_search_bar/constants';
import {
getRecentlyUsedSuggestions,
@@ -301,9 +301,9 @@ describe('BaseToken', () => {
describe('with default suggestions', () => {
describe.each`
- operator | shouldRenderFilteredSearchSuggestion
- ${OPERATOR_IS} | ${true}
- ${OPERATOR_IS_NOT} | ${false}
+ operator | shouldRenderFilteredSearchSuggestion
+ ${OPERATOR_IS} | ${true}
+ ${OPERATOR_NOT} | ${false}
`('when operator is $operator', ({ shouldRenderFilteredSearchSuggestion, operator }) => {
beforeEach(() => {
const props = {
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index c0c3c4a9729..2e2d04efb55 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -2,7 +2,7 @@ import { GlAlert, GlBadge, GlPagination, GlTabs, GlTab } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Tracking from '~/tracking';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import PageWrapper from '~/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue';
@@ -293,7 +293,7 @@ describe('AlertManagementEmptyState', () => {
unique: true,
symbol: '@',
token: AuthorToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchPath: '/link',
fetchAuthors: expect.any(Function),
},
@@ -304,7 +304,7 @@ describe('AlertManagementEmptyState', () => {
unique: true,
symbol: '@',
token: AuthorToken,
- operators: OPERATOR_IS_ONLY,
+ operators: OPERATORS_IS,
fetchPath: '/link',
fetchAuthors: expect.any(Function),
},
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index a409c15533b..cbd931c514f 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -192,4 +192,8 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
include_examples "XSS prevention", "ruby"
end
+
+ it_behaves_like "filter timeout" do
+    let(:text) { 'def fun end' }
+ end
end
diff --git a/spec/lib/banzai/filter/timeout_html_pipeline_filter_spec.rb b/spec/lib/banzai/filter/timeout_html_pipeline_filter_spec.rb
new file mode 100644
index 00000000000..cdb40ef5b04
--- /dev/null
+++ b/spec/lib/banzai/filter/timeout_html_pipeline_filter_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::TimeoutHtmlPipelineFilter do
+ include FilterSpecHelper
+
+ it_behaves_like 'filter timeout' do
+    let(:text) { 'some text' }
+ end
+
+ it 'raises NotImplementedError' do
+ expect { filter('test') }.to raise_error NotImplementedError
+ end
+
+ context 'when markup_rendering_timeout is disabled' do
+ it 'waits until the execution completes' do
+      text = 'some text'
+
+ stub_feature_flags(markup_rendering_timeout: false)
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:call_with_timeout) do
+ text
+ end
+ end
+
+ expect(Gitlab::RenderTimeout).not_to receive(:timeout)
+
+ result = filter(text)
+
+ expect(result).to eq text
+ end
+ end
+end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
index 24f8fb40445..271022e7c55 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
@@ -22,10 +22,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
project.add_maintainer(user)
mr1.metrics.update!(merged_at: 1.month.ago)
mr2.metrics.update!(merged_at: Time.now)
- end
-
- around do |example|
- Timecop.freeze { example.run }
+ freeze_time
end
describe 'date range parameters' do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
index 258f4a0d019..4db5d64164e 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
@@ -18,10 +18,6 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do
subject { described_class.new(stage: stage, query: query).seconds }
- around do |example|
- Timecop.freeze { example.run }
- end
-
it 'retruns nil when no results' do
expect(subject).to eq(nil)
end
@@ -30,11 +26,11 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do
merge_request1 = create(:merge_request, source_branch: '1', target_project: project, source_project: project)
merge_request2 = create(:merge_request, source_branch: '2', target_project: project, source_project: project)
- travel_to(5.minutes.from_now) do
+ travel(5.minutes) do
merge_request1.metrics.update!(merged_at: Time.zone.now)
end
- travel_to(10.minutes.from_now) do
+ travel(10.minutes) do
merge_request2.metrics.update!(merged_at: Time.zone.now)
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
index 34d5158a5ab..ab5a360d908 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
let(:params) { { from: 1.year.ago, current_user: user } }
diff --git a/spec/lib/gitlab/auth/unique_ips_limiter_spec.rb b/spec/lib/gitlab/auth/unique_ips_limiter_spec.rb
index b239de841b6..84f6411eae6 100644
--- a/spec/lib/gitlab/auth/unique_ips_limiter_spec.rb
+++ b/spec/lib/gitlab/auth/unique_ips_limiter_spec.rb
@@ -22,14 +22,14 @@ RSpec.describe Gitlab::Auth::UniqueIpsLimiter, :clean_gitlab_redis_shared_state
end
it 'resets count after specified time window' do
- Timecop.freeze do
+ freeze_time do
expect(described_class.update_and_return_ips_count(user.id, 'ip2')).to eq(1)
expect(described_class.update_and_return_ips_count(user.id, 'ip3')).to eq(2)
+ end
- travel_to(Time.now.utc + described_class.config.unique_ips_limit_time_window) do
- expect(described_class.update_and_return_ips_count(user.id, 'ip4')).to eq(1)
- expect(described_class.update_and_return_ips_count(user.id, 'ip5')).to eq(2)
- end
+ travel_to(Time.now.utc + described_class.config.unique_ips_limit_time_window) do
+ expect(described_class.update_and_return_ips_count(user.id, 'ip4')).to eq(1)
+ expect(described_class.update_and_return_ips_count(user.id, 'ip5')).to eq(2)
end
end
end
diff --git a/spec/lib/gitlab/checks/timed_logger_spec.rb b/spec/lib/gitlab/checks/timed_logger_spec.rb
index 6c488212eca..261fdd6c002 100644
--- a/spec/lib/gitlab/checks/timed_logger_spec.rb
+++ b/spec/lib/gitlab/checks/timed_logger_spec.rb
@@ -17,38 +17,44 @@ RSpec.describe Gitlab::Checks::TimedLogger do
logger.append_message("Checking ref: #{ref}")
end
+ around do |example|
+ freeze_time do
+ example.run
+ end
+ end
+
describe '#log_timed' do
it 'logs message' do
- Timecop.freeze(start + 30.seconds) do
- logger.log_timed(log_messages[:foo], start) { bar_check }
- end
+ travel_to(start + 30.seconds)
+
+ logger.log_timed(log_messages[:foo], start) { bar_check }
expect(logger.full_message).to eq("Checking ref: bar\nFoo message... (30000.0ms)")
end
context 'when time limit was reached' do
it 'cancels action' do
- Timecop.freeze(start + 50.seconds) do
- expect do
- logger.log_timed(log_messages[:foo], start) do
- bar_check
- end
- end.to raise_error(described_class::TimeoutError)
- end
+ travel_to(start + 50.seconds)
+
+ expect do
+ logger.log_timed(log_messages[:foo], start) do
+ bar_check
+ end
+ end.to raise_error(described_class::TimeoutError)
expect(logger.full_message).to eq("Checking ref: bar\nFoo message... (cancelled)")
end
it 'cancels action with time elapsed if work was performed' do
- Timecop.freeze(start + 30.seconds) do
- expect do
- logger.log_timed(log_messages[:foo], start) do
- grpc_check
- end
- end.to raise_error(described_class::TimeoutError)
+ travel_to(start + 30.seconds)
- expect(logger.full_message).to eq("Checking ref: bar\nFoo message... (cancelled after 30000.0ms)")
- end
+ expect do
+ logger.log_timed(log_messages[:foo], start) do
+ grpc_check
+ end
+ end.to raise_error(described_class::TimeoutError)
+
+ expect(logger.full_message).to eq("Checking ref: bar\nFoo message... (cancelled after 30000.0ms)")
end
end
end
diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
index 0e7d7f1efda..92ffeee8509 100644
--- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
@@ -29,8 +29,8 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
context 'when from date is given' do
before do
- Timecop.freeze(5.days.ago) { create(:issue, project: project) }
- Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
+ travel_to(5.days.ago) { create(:issue, project: project) }
+ travel_to(5.days.from_now) { create(:issue, project: project) }
end
it "finds the number of issues created after the 'from date'" do
@@ -45,15 +45,15 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
end
it "doesn't find issues from other projects" do
- Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project)) }
+ travel_to(5.days.from_now) { create(:issue, project: create(:project)) }
expect(subject[:value]).to eq('-')
end
context 'when `to` parameter is given' do
before do
- Timecop.freeze(5.days.ago) { create(:issue, project: project) }
- Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
+ travel_to(5.days.ago) { create(:issue, project: project) }
+ travel_to(5.days.from_now) { create(:issue, project: project) }
end
it "doesn't find any record" do
@@ -78,8 +78,8 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
context 'when from date is given' do
before do
- Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
- Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
+ travel_to(5.days.ago) { create_commit("Test message", project, user, 'master') }
+ travel_to(5.days.from_now) { create_commit("Test message", project, user, 'master') }
end
it "finds the number of commits created after the 'from date'" do
@@ -94,21 +94,21 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
end
it "doesn't find commits from other projects" do
- Timecop.freeze(5.days.from_now) { create_commit("Test message", create(:project, :repository), user, 'master') }
+ travel_to(5.days.from_now) { create_commit("Test message", create(:project, :repository), user, 'master') }
expect(subject[:value]).to eq('-')
end
it "finds a large (> 100) number of commits if present" do
- Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master', count: 100) }
+ travel_to(5.days.from_now) { create_commit("Test message", project, user, 'master', count: 100) }
expect(subject[:value]).to eq('100')
end
context 'when `to` parameter is given' do
before do
- Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
- Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
+ travel_to(5.days.ago) { create_commit("Test message", project, user, 'master') }
+ travel_to(5.days.from_now) { create_commit("Test message", project, user, 'master') }
end
it "doesn't find any record" do
diff --git a/spec/lib/gitlab/git/cross_repo_comparer_spec.rb b/spec/lib/gitlab/git/cross_repo_comparer_spec.rb
deleted file mode 100644
index 7888e224d59..00000000000
--- a/spec/lib/gitlab/git/cross_repo_comparer_spec.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Git::CrossRepoComparer do
- let(:source_project) { create(:project, :repository) }
- let(:target_project) { create(:project, :repository) }
-
- let(:source_repo) { source_project.repository.raw_repository }
- let(:target_repo) { target_project.repository.raw_repository }
-
- let(:source_branch) { 'feature' }
- let(:target_branch) { 'master' }
- let(:straight) { false }
-
- let(:source_commit) { source_repo.commit(source_branch) }
- let(:target_commit) { source_repo.commit(target_branch) }
-
- subject(:result) { described_class.new(source_repo, target_repo).compare(source_branch, target_branch, straight: straight) }
-
- describe '#compare' do
- context 'within a single repository' do
- let(:target_project) { source_project }
-
- context 'a non-straight comparison' do
- it 'compares without fetching from another repo' do
- expect(source_repo).not_to receive(:fetch_source_branch!)
-
- expect_compare(result, from: source_commit, to: target_commit)
- expect(result.straight).to eq(false)
- end
- end
-
- context 'a straight comparison' do
- let(:straight) { true }
-
- it 'compares without fetching from another repo' do
- expect(source_repo).not_to receive(:fetch_source_branch!)
-
- expect_compare(result, from: source_commit, to: target_commit)
- expect(result.straight).to eq(true)
- end
- end
- end
-
- context 'across two repositories' do
- context 'target ref exists in source repo' do
- it 'compares without fetching from another repo' do
- expect(source_repo).not_to receive(:fetch_source_branch!)
- expect(source_repo).not_to receive(:delete_refs)
-
- expect_compare(result, from: source_commit, to: target_commit)
- end
- end
-
- context 'target ref does not exist in source repo' do
- it 'compares in the source repo by fetching from the target to a temporary ref' do
- new_commit_id = create_commit(target_project.owner, target_repo, target_branch)
- new_commit = target_repo.commit(new_commit_id)
-
- # This is how the temporary ref is generated
- expect(SecureRandom).to receive(:hex).at_least(:once).and_return('foo')
-
- expect(source_repo)
- .to receive(:fetch_source_branch!)
- .with(target_repo, new_commit_id, 'refs/tmp/foo')
- .and_call_original
-
- expect(source_repo).to receive(:delete_refs).with('refs/tmp/foo').and_call_original
-
- expect_compare(result, from: source_commit, to: new_commit)
- end
- end
-
- context 'source ref does not exist in source repo' do
- let(:source_branch) { 'does-not-exist' }
-
- it 'returns an empty comparison' do
- expect(source_repo).not_to receive(:fetch_source_branch!)
- expect(source_repo).not_to receive(:delete_refs)
-
- expect(result).to be_a(::Gitlab::Git::Compare)
- expect(result.commits.size).to eq(0)
- end
- end
-
- context 'target ref does not exist in target repo' do
- let(:target_branch) { 'does-not-exist' }
-
- it 'returns nil' do
- expect(source_repo).not_to receive(:fetch_source_branch!)
- expect(source_repo).not_to receive(:delete_refs)
-
- is_expected.to be_nil
- end
- end
- end
- end
-
- def expect_compare(of, from:, to:)
- expect(of).to be_a(::Gitlab::Git::Compare)
- expect(from).to be_a(::Gitlab::Git::Commit)
- expect(to).to be_a(::Gitlab::Git::Commit)
-
- expect(of.commits).not_to be_empty
- expect(of.head).to eq(from)
- expect(of.base).to eq(to)
- end
-
- def create_commit(user, repo, branch)
- action = { action: :create, file_path: '/FILE', content: 'content' }
-
- result = repo.commit_files(user, branch_name: branch, message: 'Commit', actions: [action])
-
- result.newrev
- end
-end
diff --git a/spec/lib/gitlab/git/cross_repo_spec.rb b/spec/lib/gitlab/git/cross_repo_spec.rb
new file mode 100644
index 00000000000..09a28c144a4
--- /dev/null
+++ b/spec/lib/gitlab/git/cross_repo_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Git::CrossRepo do
+ let_it_be(:source_project) { create(:project, :repository) }
+ let_it_be(:target_project) { create(:project, :repository) }
+
+ let(:source_repo) { source_project.repository.raw_repository }
+ let(:target_repo) { target_project.repository.raw_repository }
+
+ let(:source_branch) { 'feature' }
+ let(:target_branch) { target_repo.root_ref }
+
+ let(:source_commit) { source_repo.commit(source_branch) }
+ let(:target_commit) { source_repo.commit(target_branch) }
+
+ def execute(&block)
+ described_class.new(source_repo, target_repo).execute(target_branch, &block)
+ end
+
+ describe '#execute' do
+ context 'when executed within a single repository' do
+ let(:target_project) { source_project }
+
+ it 'does not fetch from another repo' do
+ expect(source_repo).not_to receive(:fetch_source_branch!)
+
+ expect { |block| execute(&block) }.to yield_with_args(target_branch)
+ end
+ end
+
+ context 'when executed across two repositories' do
+ context 'and target ref exists in source repo' do
+ it 'does not fetch from another repo' do
+ expect(source_repo).not_to receive(:fetch_source_branch!)
+ expect(source_repo).not_to receive(:delete_refs)
+
+ expect { |block| execute(&block) }.to yield_with_args(target_commit.id)
+ end
+ end
+
+ context 'and target ref does not exist in source repo' do
+ let_it_be(:target_project) { create(:project, :repository) }
+
+ it 'fetches from the target to a temporary ref' do
+ new_commit_id = create_commit(target_project.owner, target_repo, target_branch)
+
+ # This is how the temporary ref is generated
+ expect(SecureRandom).to receive(:hex).at_least(:once).and_return('foo')
+
+ expect(source_repo)
+ .to receive(:fetch_source_branch!)
+ .with(target_repo, new_commit_id, 'refs/tmp/foo')
+ .and_call_original
+
+ expect(source_repo).to receive(:delete_refs).with('refs/tmp/foo').and_call_original
+
+ expect { |block| execute(&block) }.to yield_with_args(new_commit_id)
+ end
+ end
+
+ context 'and target ref does not exist in target repo' do
+ let(:target_branch) { 'does-not-exist' }
+
+ it 'returns nil' do
+ expect(source_repo).not_to receive(:fetch_source_branch!)
+ expect(source_repo).not_to receive(:delete_refs)
+
+ expect { |block| execute(&block) }.not_to yield_control
+ end
+ end
+ end
+ end
+
+ def create_commit(user, repo, branch)
+ action = { action: :create, file_path: '/FILE', content: 'content' }
+
+ result = repo.commit_files(user, branch_name: branch, message: 'Commit', actions: [action])
+
+ result.newrev
+ end
+end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 5e27979cbf3..1984c1157fe 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2211,15 +2211,49 @@ RSpec.describe Gitlab::Git::Repository do
end
describe '#compare_source_branch' do
- it 'delegates to Gitlab::Git::CrossRepoComparer' do
- expect_next_instance_of(::Gitlab::Git::CrossRepoComparer) do |instance|
- expect(instance.source_repo).to eq(:source_repository)
- expect(instance.target_repo).to eq(repository)
+ it 'compares two branches cross repo' do
+ mutable_repository.commit_files(
+ user,
+ branch_name: mutable_repository.root_ref, message: 'Committing something',
+ actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'New file' }]
+ )
- expect(instance).to receive(:compare).with('feature', 'master', straight: :straight)
+ repository.commit_files(
+ user,
+ branch_name: repository.root_ref, message: 'Commit to root ref',
+ actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'One more' }]
+ )
+
+ [
+ [repository, mutable_repository, true],
+ [repository, mutable_repository, false],
+ [mutable_repository, repository, true],
+ [mutable_repository, repository, false]
+ ].each do |source_repo, target_repo, straight|
+ raw_compare = target_repo.compare_source_branch(
+ target_repo.root_ref, source_repo, source_repo.root_ref, straight: straight)
+
+ expect(raw_compare).to be_a(::Gitlab::Git::Compare)
+
+ expect(raw_compare.commits).to eq([source_repo.commit])
+ expect(raw_compare.head).to eq(source_repo.commit)
+ expect(raw_compare.base).to eq(target_repo.commit)
+ expect(raw_compare.straight).to eq(straight)
end
+ end
- repository.compare_source_branch('master', :source_repository, 'feature', straight: :straight)
+ context 'source ref does not exist in source repo' do
+ it 'returns an empty comparison' do
+ expect_next_instance_of(::Gitlab::Git::CrossRepo) do |instance|
+ expect(instance).not_to receive(:fetch_source_branch!)
+ end
+
+ raw_compare = repository.compare_source_branch(
+ repository.root_ref, mutable_repository, 'does-not-exist', straight: true)
+
+ expect(raw_compare).to be_a(::Gitlab::Git::Compare)
+ expect(raw_compare.commits.size).to eq(0)
+ end
end
end
diff --git a/spec/lib/gitlab/puma_logging/json_formatter_spec.rb b/spec/lib/gitlab/puma_logging/json_formatter_spec.rb
index 64ace09e01b..d38f54bccf1 100644
--- a/spec/lib/gitlab/puma_logging/json_formatter_spec.rb
+++ b/spec/lib/gitlab/puma_logging/json_formatter_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::PumaLogging::JSONFormatter do
it "generate json format with timestamp and pid" do
- Timecop.freeze( Time.utc(2019, 12, 04, 9, 10, 11, 123456)) do
- expect(subject.call('log message')).to eq "{\"timestamp\":\"2019-12-04T09:10:11.123Z\",\"pid\":#{Process.pid},\"message\":\"log message\"}"
+ travel_to(Time.utc(2019, 12, 04, 9, 10, 11)) do
+ expect(subject.call('log message')).to eq "{\"timestamp\":\"2019-12-04T09:10:11.000Z\",\"pid\":#{Process.pid},\"message\":\"log message\"}"
end
end
end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 5c9a3cc0a24..f2488229a55 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -349,6 +349,23 @@ RSpec.describe Gitlab::Workhorse do
expect(subject[:GitConfigOptions]).to be_empty
end
end
+
+ context 'when remote_ip is available in the application context' do
+    it 'includes RemoteIP params' do
+ result = {}
+ Gitlab::ApplicationContext.with_context(remote_ip: "1.2.3.4") do
+ result = described_class.git_http_ok(repository, Gitlab::GlRepository::PROJECT, user, action)
+ end
+ expect(result[:RemoteIP]).to eql("1.2.3.4")
+ end
+ end
+
+ context 'when remote_ip is not available in the application context' do
+ it 'does not include RemoteIP params' do
+ result = described_class.git_http_ok(repository, Gitlab::GlRepository::PROJECT, user, action)
+ expect(result).not_to have_key(:RemoteIP)
+ end
+ end
end
describe '.set_key_and_notify' do
diff --git a/spec/lib/json_web_token/hmac_token_spec.rb b/spec/lib/json_web_token/hmac_token_spec.rb
index cf7e5c54f45..016084eaf69 100644
--- a/spec/lib/json_web_token/hmac_token_spec.rb
+++ b/spec/lib/json_web_token/hmac_token_spec.rb
@@ -1,9 +1,11 @@
# frozen_string_literal: true
require 'json'
-require 'timecop'
+require 'active_support/testing/time_helpers'
RSpec.describe JSONWebToken::HMACToken do
+ include ActiveSupport::Testing::TimeHelpers
+
let(:secret) { 'shh secret squirrel' }
shared_examples 'a valid, non-expired token' do
@@ -54,13 +56,13 @@ RSpec.describe JSONWebToken::HMACToken do
end
context 'that is expired' do
- # Needs the ! so Timecop.freeze() is effective
+ # Needs the ! so freeze_time() is effective
let!(:encoded_token) { described_class.new(secret).encoded }
it "raises exception saying 'Signature has expired'" do
# Needs to be 120 seconds, because the default expiry is 60 seconds
# with an additional 60 second leeway.
- Timecop.freeze(Time.now + 120) do
+ travel_to(Time.now + 120) do
expect { decoded_token }.to raise_error(JWT::ExpiredSignature, 'Signature has expired')
end
end
@@ -77,19 +79,19 @@ RSpec.describe JSONWebToken::HMACToken do
context 'that has expired' do
let(:expire_time) { 0 }
+ around do |example|
+ travel_to(Time.now + 1) { example.run }
+ end
+
context 'with the default leeway' do
- Timecop.freeze(Time.now + 1) do
- it_behaves_like 'a valid, non-expired token'
- end
+ it_behaves_like 'a valid, non-expired token'
end
context 'with a leeway of 0 seconds' do
let(:leeway) { 0 }
it "raises exception saying 'Signature has expired'" do
- Timecop.freeze(Time.now + 1) do
- expect { decoded_token }.to raise_error(JWT::ExpiredSignature, 'Signature has expired')
- end
+ expect { decoded_token }.to raise_error(JWT::ExpiredSignature, 'Signature has expired')
end
end
end
diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb
index 7bc15f40065..fc768bdcb82 100644
--- a/spec/lib/peek/views/active_record_spec.rb
+++ b/spec/lib/peek/views/active_record_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
end
it 'includes db role data and db_config_name name' do
- Timecop.freeze(2021, 2, 23, 10, 0) do
+ travel_to(Time.utc(2021, 2, 23, 10, 0)) do
ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2)
ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3)
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index e76cd22d342..8cccc9ad83e 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -3010,44 +3010,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#uses_external_project_ci_config?' do
- subject(:uses_external_project_ci_config) { project.uses_external_project_ci_config? }
-
- let(:project) { build(:project) }
-
- context 'when ci_config_path is configured with external project' do
- before do
- project.ci_config_path = '.gitlab-ci.yml@hello/world'
- end
-
- it { is_expected.to eq(true) }
- end
-
- context 'when ci_config_path is nil' do
- before do
- project.ci_config_path = nil
- end
-
- it { is_expected.to eq(false) }
- end
-
- context 'when ci_config_path is configured with a file in the project' do
- before do
- project.ci_config_path = 'hello/world/gitlab-ci.yml'
- end
-
- it { is_expected.to eq(false) }
- end
-
- context 'when ci_config_path is configured with remote file' do
- before do
- project.ci_config_path = 'https://example.org/file.yml'
- end
-
- it { is_expected.to eq(false) }
- end
- end
-
describe '#latest_successful_build_for_ref' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create_pipeline(project) }
@@ -7518,15 +7480,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#ci_config_external_project' do
- subject(:ci_config_external_project) { project.ci_config_external_project }
-
- let(:other_project) { create(:project) }
- let(:project) { build(:project, ci_config_path: ".gitlab-ci.yml@#{other_project.full_path}") }
-
- it { is_expected.to eq(other_project) }
- end
-
describe '#enabled_group_deploy_keys' do
let_it_be(:project) { create(:project) }
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 3a9b2d02af5..4eb03204de3 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -61,6 +61,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['inactive_projects_min_size_mb']).to eq(0)
expect(json_response['inactive_projects_send_warning_email_after_months']).to eq(1)
expect(json_response['can_create_group']).to eq(true)
+ expect(json_response['jira_connect_application_key']).to eq(nil)
+ expect(json_response['jira_connect_proxy_url']).to eq(nil)
end
end
@@ -158,7 +160,9 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
inactive_projects_delete_after_months: 24,
inactive_projects_min_size_mb: 10,
inactive_projects_send_warning_email_after_months: 12,
- can_create_group: false
+ can_create_group: false,
+ jira_connect_application_key: '123',
+ jira_connect_proxy_url: 'http://example.com'
}
expect(response).to have_gitlab_http_status(:ok)
@@ -220,6 +224,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['inactive_projects_min_size_mb']).to eq(10)
expect(json_response['inactive_projects_send_warning_email_after_months']).to eq(12)
expect(json_response['can_create_group']).to eq(false)
+ expect(json_response['jira_connect_application_key']).to eq('123')
+ expect(json_response['jira_connect_proxy_url']).to eq('http://example.com')
end
end
diff --git a/spec/requests/api/usage_data_queries_spec.rb b/spec/requests/api/usage_data_queries_spec.rb
index 6ce03954246..c2fb7d0c72a 100644
--- a/spec/requests/api/usage_data_queries_spec.rb
+++ b/spec/requests/api/usage_data_queries_spec.rb
@@ -80,7 +80,7 @@ RSpec.describe API::UsageDataQueries do
end
it 'matches the generated query' do
- Timecop.freeze(2021, 1, 1) do
+ travel_to(Time.utc(2021, 1, 1)) do
get api(endpoint, admin)
end
diff --git a/spec/serializers/entity_date_helper_spec.rb b/spec/serializers/entity_date_helper_spec.rb
index a8c338675e2..5a4571339b3 100644
--- a/spec/serializers/entity_date_helper_spec.rb
+++ b/spec/serializers/entity_date_helper_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe EntityDateHelper do
describe '#remaining_days_in_words' do
around do |example|
- Timecop.freeze(Time.utc(2017, 3, 17)) { example.run }
+ travel_to(Time.utc(2017, 3, 17)) { example.run }
end
context 'when less than 31 days remaining' do
@@ -75,7 +75,9 @@ RSpec.describe EntityDateHelper do
end
it 'returns 1 day remaining when queried mid-day' do
- Timecop.freeze(Time.utc(2017, 3, 17, 13, 10)) do
+ travel_back
+
+ travel_to(Time.utc(2017, 3, 17, 13, 10)) do
expect(milestone_remaining).to eq("1 day remaining")
end
end
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index 5fdefb2b306..b866293393b 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -912,7 +912,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when outside freeze period' do
it 'creates two jobs' do
- Timecop.freeze(2020, 4, 10, 22, 59) do
+ travel_to(Time.utc(2020, 4, 10, 22, 59)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('test-job', 'deploy-job')
end
@@ -921,7 +921,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when inside freeze period' do
it 'creates one job' do
- Timecop.freeze(2020, 4, 10, 23, 1) do
+ travel_to(Time.utc(2020, 4, 10, 23, 1)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('test-job')
end
@@ -946,7 +946,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when outside freeze period' do
it 'creates two jobs' do
- Timecop.freeze(2020, 4, 10, 22, 59) do
+ travel_to(Time.utc(2020, 4, 10, 22, 59)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('deploy-job')
end
@@ -955,7 +955,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when inside freeze period' do
it 'does not create the pipeline', :aggregate_failures do
- Timecop.freeze(2020, 4, 10, 23, 1) do
+ travel_to(Time.utc(2020, 4, 10, 23, 1)) do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
diff --git a/spec/support/banzai/filter_timeout_shared_examples.rb b/spec/support/banzai/filter_timeout_shared_examples.rb
new file mode 100644
index 00000000000..1f2ebe6fef6
--- /dev/null
+++ b/spec/support/banzai/filter_timeout_shared_examples.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+# This shared_example requires the following variables:
+# - text: The text to be run through the filter
+#
+# Usage:
+#
+# it_behaves_like 'filter timeout' do
+# let(:text) { 'some text' }
+# end
+RSpec.shared_examples 'filter timeout' do
+ context 'when rendering takes too long' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:context) { { project: project } }
+
+ it 'times out' do
+ stub_const("Banzai::Filter::TimeoutHtmlPipelineFilter::RENDER_TIMEOUT", 0.1)
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:call_with_timeout) do
+ sleep(0.2)
+ text
+ end
+ end
+
+ expect(Gitlab::RenderTimeout).to receive(:timeout).and_call_original
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(Timeout::Error),
+ project_id: context[:project].id,
+ class_name: described_class.name.demodulize
+ )
+
+ result = filter(text)
+
+ expect(result.to_html).to eq text
+ end
+ end
+end
diff --git a/spec/support/cycle_analytics_helpers/test_generation.rb b/spec/support/cycle_analytics_helpers/test_generation.rb
index f866220b919..816caf5f775 100644
--- a/spec/support/cycle_analytics_helpers/test_generation.rb
+++ b/spec/support/cycle_analytics_helpers/test_generation.rb
@@ -42,17 +42,17 @@ module CycleAnalyticsHelpers
end_time = start_time + rand(1..5).days
start_time_conditions.each do |condition_name, condition_fn|
- Timecop.freeze(start_time) { condition_fn[self, data] }
+ travel_to(start_time) { condition_fn[self, data] }
end
# Run `before_end_fn` at the midpoint between `start_time` and `end_time`
- Timecop.freeze(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn
+ travel_to(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn
end_time_conditions.each do |condition_name, condition_fn|
- Timecop.freeze(end_time) { condition_fn[self, data] }
+ travel_to(end_time) { condition_fn[self, data] }
end
- Timecop.freeze(end_time + 1.day) { post_fn[self, data] } if post_fn
+ travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
end_time - start_time
end
@@ -74,14 +74,14 @@ module CycleAnalyticsHelpers
end_time = rand(1..10).days.from_now
start_time_conditions.each do |condition_name, condition_fn|
- Timecop.freeze(start_time) { condition_fn[self, data] }
+ travel_to(start_time) { condition_fn[self, data] }
end
end_time_conditions.each do |condition_name, condition_fn|
- Timecop.freeze(end_time) { condition_fn[self, data] }
+ travel_to(end_time) { condition_fn[self, data] }
end
- Timecop.freeze(end_time + 1.day) { post_fn[self, data] } if post_fn
+ travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
# Turn off the stub before checking assertions
allow(self).to receive(:project).and_call_original
@@ -97,17 +97,17 @@ module CycleAnalyticsHelpers
end_time = start_time + rand(1..5).days
# Run `before_end_fn` at the midpoint between `start_time` and `end_time`
- Timecop.freeze(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn
+ travel_to(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn
end_time_conditions.each do |condition_name, condition_fn|
- Timecop.freeze(start_time) { condition_fn[self, data] }
+ travel_to(start_time) { condition_fn[self, data] }
end
start_time_conditions.each do |condition_name, condition_fn|
- Timecop.freeze(end_time) { condition_fn[self, data] }
+ travel_to(end_time) { condition_fn[self, data] }
end
- Timecop.freeze(end_time + 1.day) { post_fn[self, data] } if post_fn
+ travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
expect(subject[phase].project_median).to be_nil
end
@@ -122,10 +122,10 @@ module CycleAnalyticsHelpers
end_time = rand(1..10).days.from_now
end_time_conditions.each_with_index do |(_condition_name, condition_fn), index|
- Timecop.freeze(end_time + index.days) { condition_fn[self, data] }
+ travel_to(end_time + index.days) { condition_fn[self, data] }
end
- Timecop.freeze(end_time + 1.day) { post_fn[self, data] } if post_fn
+ travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
expect(subject[phase].project_median).to be_nil
end
@@ -139,7 +139,7 @@ module CycleAnalyticsHelpers
start_time = Time.now
start_time_conditions.each do |condition_name, condition_fn|
- Timecop.freeze(start_time) { condition_fn[self, data] }
+ travel_to(start_time) { condition_fn[self, data] }
end
post_fn[self, data] if post_fn
diff --git a/spec/support/helpers/features/runners_helpers.rb b/spec/support/helpers/features/runners_helpers.rb
index 63fc628358c..c5d26108953 100644
--- a/spec/support/helpers/features/runners_helpers.rb
+++ b/spec/support/helpers/features/runners_helpers.rb
@@ -50,7 +50,7 @@ module Spec
page.within(search_bar_selector) do
click_on filter
- # For OPERATOR_IS_ONLY, clicking the filter
+ # For OPERATORS_IS, clicking the filter
# immediately preselects "=" operator
page.find('input').send_keys(value)
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 32e6e8d50bd..40eb46878ad 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -3,12 +3,14 @@
require 'action_dispatch/testing/test_request'
require 'fileutils'
require 'graphlyte'
+require 'active_support/testing/time_helpers'
require_relative '../../../lib/gitlab/popen'
module JavaScriptFixturesHelpers
extend ActiveSupport::Concern
include Gitlab::Popen
+ include ActiveSupport::Testing::TimeHelpers
extend self
@@ -22,7 +24,7 @@ module JavaScriptFixturesHelpers
# pick an arbitrary date from the past, so tests are not time dependent
# Also see spec/frontend/__helpers__/fake_date/jest.js
- Timecop.freeze(Time.utc(2015, 7, 3, 10)) { example.run }
+ travel_to(Time.utc(2015, 7, 3, 10)) { example.run }
raise NoMethodError.new('You need to set `response` for the fixture generator! This will automatically happen with `type: :controller` or `type: :request`.', 'response') unless respond_to?(:response)
diff --git a/spec/support/shared_contexts/rack_attack_shared_context.rb b/spec/support/shared_contexts/rack_attack_shared_context.rb
index e7b2ee76c3c..12625ead72b 100644
--- a/spec/support/shared_contexts/rack_attack_shared_context.rb
+++ b/spec/support/shared_contexts/rack_attack_shared_context.rb
@@ -6,7 +6,7 @@ RSpec.shared_context 'rack attack cache store' do
Rack::Attack.cache.store = ActiveSupport::Cache::MemoryStore.new
# Make time-dependent tests deterministic
- Timecop.freeze { example.run }
+ freeze_time { example.run }
Rack::Attack.cache.store = Rails.cache
end
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index 11759b6671f..82ed6eb4c95 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -68,6 +68,7 @@ RSpec.shared_examples 'rate-limited token requests' do
# Set low limits
settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period
settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds
+ travel_back
end
after do
@@ -220,6 +221,7 @@ RSpec.shared_examples 'rate-limited web authenticated requests' do
# Set low limits
settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period
settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds
+ travel_back
end
after do
@@ -436,6 +438,7 @@ RSpec.shared_examples 'rate-limited unauthenticated requests' do
# Set low limits
settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period
settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds
+ travel_back
end
context 'when the throttle is enabled' do
diff --git a/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb b/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
index 11343f69d6f..491ea64cff1 100644
--- a/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
+++ b/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
@@ -10,23 +10,24 @@ RSpec.describe Metrics::Dashboard::PruneOldAnnotationsWorker do
describe '#perform' do
it 'removes all annotations older than cut off', :aggregate_failures do
- Timecop.freeze(now) do
+ travel_to(now) do
described_class.new.perform
expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation, two_weeks_old_annotation])
# is idempotent in the scope of 24h
expect { described_class.new.perform }.not_to change { Metrics::Dashboard::Annotation.all.to_a }
- travel_to(24.hours.from_now) do
- described_class.new.perform
- expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation])
- end
+ end
+
+ travel_to(now + 24.hours) do
+ described_class.new.perform
+ expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation])
end
end
context 'batch to be deleted is bigger than upper limit' do
it 'schedules second job to clear remaining records' do
- Timecop.freeze(now) do
+ travel_to(now) do
create(:metrics_dashboard_annotation, starting_at: 1.month.ago)
stub_const("#{described_class}::DELETE_LIMIT", 1)
diff --git a/workhorse/gitaly_test.go b/workhorse/gitaly_test.go
index 234a11e5dc9..b9cf4fcc685 100644
--- a/workhorse/gitaly_test.go
+++ b/workhorse/gitaly_test.go
@@ -272,6 +272,7 @@ func TestPostReceivePackProxiedToGitalySuccessfully(t *testing.T) {
require.Equal(t, apiResponse.Repository.RelativePath, gitalyRequest.Repository.RelativePath)
require.Equal(t, apiResponse.GL_ID, gitalyRequest.GlId)
require.Equal(t, apiResponse.GL_USERNAME, gitalyRequest.GlUsername)
+ require.Equal(t, apiResponse.RemoteIp, "1.2.3.4")
require.Equal(t, apiResponse.GitConfigOptions, gitalyRequest.GitConfigOptions)
require.Equal(t, gitProtocol, gitalyRequest.GitProtocol)
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index 6a6a51b27bb..ba27a3e6ec9 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -128,6 +128,10 @@ type Response struct {
// GL_REPOSITORY is an environment variable used by gitlab-shell hooks during
// 'git push' and 'git pull'
GL_REPOSITORY string
+
+ // RemoteIp holds the IP of the request issuing the action
+ RemoteIp string
+
// GitConfigOptions holds the custom options that we want to pass to the git command
GitConfigOptions []string
// StoreLFSPath is provided by the GitLab Rails application to mark where the tmp file should be placed.
diff --git a/workhorse/internal/git/info-refs.go b/workhorse/internal/git/info-refs.go
index 2eaed388f60..89db2954e30 100644
--- a/workhorse/internal/git/info-refs.go
+++ b/workhorse/internal/git/info-refs.go
@@ -59,8 +59,7 @@ func handleGetInfoRefsWithGitaly(ctx context.Context, responseWriter *HttpRespon
ctx,
a.GitalyServer,
gitaly.WithFeatures(a.GitalyServer.Features),
- gitaly.WithUserID(a.GL_ID),
- gitaly.WithUsername(a.GL_USERNAME),
+ gitaly.WithLoggingMetadata(a),
)
if err != nil {
return err
diff --git a/workhorse/internal/git/receive-pack.go b/workhorse/internal/git/receive-pack.go
index a85f0edccac..b312bbad621 100644
--- a/workhorse/internal/git/receive-pack.go
+++ b/workhorse/internal/git/receive-pack.go
@@ -24,8 +24,7 @@ func handleReceivePack(w *HttpResponseWriter, r *http.Request, a *api.Response)
r.Context(),
a.GitalyServer,
gitaly.WithFeatures(a.GitalyServer.Features),
- gitaly.WithUserID(a.GL_ID),
- gitaly.WithUsername(a.GL_USERNAME),
+ gitaly.WithLoggingMetadata(a),
)
if err != nil {
return fmt.Errorf("smarthttp.ReceivePack: %v", err)
diff --git a/workhorse/internal/git/upload-pack.go b/workhorse/internal/git/upload-pack.go
index bbed5224b2d..13c0069cde2 100644
--- a/workhorse/internal/git/upload-pack.go
+++ b/workhorse/internal/git/upload-pack.go
@@ -48,8 +48,7 @@ func handleUploadPackWithGitaly(ctx context.Context, a *api.Response, clientRequ
ctx,
a.GitalyServer,
gitaly.WithFeatures(a.GitalyServer.Features),
- gitaly.WithUserID(a.GL_ID),
- gitaly.WithUsername(a.GL_USERNAME),
+ gitaly.WithLoggingMetadata(a),
)
if err != nil {
return fmt.Errorf("get gitaly client: %w", err)
diff --git a/workhorse/internal/gitaly/gitaly.go b/workhorse/internal/gitaly/gitaly.go
index b695acbb688..799159689ae 100644
--- a/workhorse/internal/gitaly/gitaly.go
+++ b/workhorse/internal/gitaly/gitaly.go
@@ -69,18 +69,6 @@ func InitializeSidechannelRegistry(logger *logrus.Logger) {
type MetadataFunc func(metadata.MD)
-func WithUserID(userID string) MetadataFunc {
- return func(md metadata.MD) {
- md.Append("user_id", userID)
- }
-}
-
-func WithUsername(username string) MetadataFunc {
- return func(md metadata.MD) {
- md.Append("username", username)
- }
-}
-
func WithFeatures(features map[string]string) MetadataFunc {
return func(md metadata.MD) {
for k, v := range features {
@@ -92,6 +80,20 @@ func WithFeatures(features map[string]string) MetadataFunc {
}
}
+func WithLoggingMetadata(r *api.Response) MetadataFunc {
+ return func(md metadata.MD) {
+ if r.GL_ID != "" {
+ md.Append("user_id", r.GL_ID)
+ }
+ if r.GL_USERNAME != "" {
+ md.Append("username", r.GL_USERNAME)
+ }
+ if r.RemoteIp != "" {
+ md.Append("remote_ip", r.RemoteIp)
+ }
+ }
+}
+
func withOutgoingMetadata(ctx context.Context, addMetadataFuncs ...MetadataFunc) context.Context {
md := metadata.New(nil)
diff --git a/workhorse/internal/gitaly/gitaly_test.go b/workhorse/internal/gitaly/gitaly_test.go
index f693f102447..f81dc16149a 100644
--- a/workhorse/internal/gitaly/gitaly_test.go
+++ b/workhorse/internal/gitaly/gitaly_test.go
@@ -22,12 +22,16 @@ func TestNewSmartHTTPClient(t *testing.T) {
context.Background(),
serverFixture(),
WithFeatures(features()),
- WithUsername("gl_username"),
- WithUserID("gl_id"),
+ WithLoggingMetadata(&api.Response{
+ GL_USERNAME: "gl_username",
+ GL_ID: "gl_id",
+ RemoteIp: "1.2.3.4",
+ }),
)
require.NoError(t, err)
testOutgoingMetadata(t, ctx)
testOutgoingIDAndUsername(t, ctx)
+ testOutgoingRemoteIP(t, ctx)
require.NotNil(t, client.sidechannelRegistry)
}
@@ -95,6 +99,13 @@ func testOutgoingIDAndUsername(t *testing.T, ctx context.Context) {
require.Equal(t, md["username"], []string{"gl_username"})
}
+func testOutgoingRemoteIP(t *testing.T, ctx context.Context) {
+ md, ok := metadata.FromOutgoingContext(ctx)
+ require.True(t, ok, "get metadata from context")
+
+ require.Equal(t, md["remote_ip"], []string{"1.2.3.4"})
+}
+
func features() map[string]string {
features := make(map[string]string)
for k, v := range allowedFeatures() {
diff --git a/workhorse/main_test.go b/workhorse/main_test.go
index 5ebc26c7ac7..8dce3480e0b 100644
--- a/workhorse/main_test.go
+++ b/workhorse/main_test.go
@@ -813,6 +813,7 @@ func gitOkBody(t *testing.T) *api.Response {
return &api.Response{
GL_ID: "user-123",
GL_USERNAME: "username",
+ RemoteIp: "1.2.3.4",
Repository: gitalypb.Repository{
StorageName: "default",
RelativePath: "foo/bar.git",