Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-05-18 18:10:54 +00:00
parent 346c2ebb5a
commit 042cd704b8
66 changed files with 1785 additions and 196 deletions

View File

@ -525,6 +525,8 @@
.qa:rules:package-and-qa:
rules:
- <<: *if-not-ee
when: never
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *ci-qa-patterns
allow_failure: true

View File

@ -3,6 +3,7 @@ import { GlLoadingIcon, GlPagination, GlSprintf } from '@gitlab/ui';
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import Mousetrap from 'mousetrap';
import { mapState, mapGetters, mapActions } from 'vuex';
import { DynamicScroller, DynamicScrollerItem } from 'vendor/vue-virtual-scroller';
import api from '~/api';
import {
keysFor,
@ -17,7 +18,6 @@ import { getParameterByName, parseBoolean } from '~/lib/utils/common_utils';
import { updateHistory } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
import PanelResizer from '~/vue_shared/components/panel_resizer.vue';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import notesEventHub from '../../notes/event_hub';
import {
@ -69,8 +69,9 @@ export default {
PanelResizer,
GlPagination,
GlSprintf,
DynamicScroller,
DynamicScrollerItem,
},
mixins: [glFeatureFlagsMixin()],
alerts: {
ALERT_OVERFLOW_HIDDEN,
ALERT_MERGE_CONFLICT,
@ -196,7 +197,12 @@ export default {
'renderTreeList',
'showWhitespace',
]),
...mapGetters('diffs', ['whichCollapsedTypes', 'isParallelView', 'currentDiffIndex']),
...mapGetters('diffs', [
'whichCollapsedTypes',
'isParallelView',
'currentDiffIndex',
'isVirtualScrollingEnabled',
]),
...mapGetters('batchComments', ['draftsCount']),
...mapGetters(['isNotesFetched', 'getNoteableData']),
diffs() {
@ -561,17 +567,41 @@ export default {
<commit-widget v-if="commit" :commit="commit" :collapsible="false" />
<div v-if="isBatchLoading" class="loading"><gl-loading-icon size="lg" /></div>
<template v-else-if="renderDiffFiles">
<diff-file
v-for="(file, index) in diffs"
:key="file.newPath"
:file="file"
:reviewed="fileReviews[file.id]"
:is-first-file="index === 0"
:is-last-file="index === diffFilesLength - 1"
:help-page-path="helpPagePath"
:can-current-user-fork="canCurrentUserFork"
:view-diffs-file-by-file="viewDiffsFileByFile"
/>
<dynamic-scroller
v-if="isVirtualScrollingEnabled"
:items="diffs"
:min-item-size="70"
:buffer="1000"
:use-transform="false"
page-mode
>
<template #default="{ item, index, active }">
<dynamic-scroller-item :item="item" :active="active">
<diff-file
:file="item"
:reviewed="fileReviews[item.id]"
:is-first-file="index === 0"
:is-last-file="index === diffFilesLength - 1"
:help-page-path="helpPagePath"
:can-current-user-fork="canCurrentUserFork"
:view-diffs-file-by-file="viewDiffsFileByFile"
/>
</dynamic-scroller-item>
</template>
</dynamic-scroller>
<template v-else>
<diff-file
v-for="(file, index) in diffs"
:key="file.new_path"
:file="file"
:reviewed="fileReviews[file.id]"
:is-first-file="index === 0"
:is-last-file="index === diffFilesLength - 1"
:help-page-path="helpPagePath"
:can-current-user-fork="canCurrentUserFork"
:view-diffs-file-by-file="viewDiffsFileByFile"
/>
</template>
<div
v-if="showFileByFileNavigation"
data-testid="file-by-file-navigation"

View File

@ -49,9 +49,7 @@ export default {
},
},
computed: {
...mapState({
projectPath: (state) => state.diffs.projectPath,
}),
...mapState('diffs', ['projectPath']),
...mapGetters('diffs', [
'isInlineView',
'isParallelView',

View File

@ -83,7 +83,7 @@ export default {
computed: {
...mapState('diffs', ['currentDiffFileId', 'codequalityDiff']),
...mapGetters(['isNotesFetched']),
...mapGetters('diffs', ['getDiffFileDiscussions']),
...mapGetters('diffs', ['getDiffFileDiscussions', 'isVirtualScrollingEnabled']),
viewBlobHref() {
return escape(this.file.view_path);
},
@ -290,6 +290,7 @@ export default {
'is-active': currentDiffFileId === file.file_hash,
'comments-disabled': Boolean(file.brokenSymlink),
'has-body': showBody,
'is-virtual-scrolling': isVirtualScrollingEnabled,
}"
:data-path="file.new_path"
class="diff-file file-holder gl-border-none"

View File

@ -170,3 +170,6 @@ export function suggestionCommitMessage(state, _, rootState) {
},
});
}
export const isVirtualScrollingEnabled = (state) =>
!state.viewDiffsFileByFile && window.gon?.features?.diffsVirtualScrolling;

View File

@ -729,7 +729,7 @@ table.code {
}
.files {
.diff-file:last-child {
.diff-file:not(.is-virtual-scrolling):last-child {
margin-bottom: 0;
}
}

View File

@ -7,6 +7,10 @@
.diff-files-holder {
flex: 1;
min-width: 0;
.vue-recycle-scroller__item-wrapper {
overflow: visible;
}
}
.with-system-header {

View File

@ -42,6 +42,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:confidential_notes, @project, default_enabled: :yaml)
push_frontend_feature_flag(:usage_data_i_testing_summary_widget_total, @project, default_enabled: :yaml)
push_frontend_feature_flag(:improved_emoji_picker, project, default_enabled: :yaml)
push_frontend_feature_flag(:diffs_virtual_scrolling, project, default_enabled: :yaml)
# Usage data feature flags
push_frontend_feature_flag(:users_expanding_widgets_usage_data, @project, default_enabled: :yaml)

View File

@ -128,8 +128,41 @@ class Project < ApplicationRecord
after_initialize :use_hashed_storage
after_create :check_repository_absence!
acts_as_ordered_taggable
alias_method :topics, :tag_list
acts_as_ordered_taggable_on :topics
# The 'tag_list' alias and the 'has_many' associations are required during the 'tags -> topics' migration
# TODO: eliminate 'tag_list', 'topic_taggings' and 'tags' in the further process of the migration
# https://gitlab.com/gitlab-org/gitlab/-/issues/331081
alias_attribute :tag_list, :topic_list
has_many :topic_taggings, -> { includes(:tag).order("#{ActsAsTaggableOn::Tagging.table_name}.id") },
as: :taggable,
class_name: 'ActsAsTaggableOn::Tagging',
after_add: :dirtify_tag_list,
after_remove: :dirtify_tag_list
has_many :topics, -> { order("#{ActsAsTaggableOn::Tagging.table_name}.id") },
class_name: 'ActsAsTaggableOn::Tag',
through: :topic_taggings,
source: :tag
has_many :tags, -> { order("#{ActsAsTaggableOn::Tagging.table_name}.id") },
class_name: 'ActsAsTaggableOn::Tag',
through: :topic_taggings,
source: :tag
# Overwriting 'topic_list' and 'topic_list=' is necessary to ensure functionality during the background migration [1].
# [1] https://gitlab.com/gitlab-org/gitlab/-/merge_requests/61237
# TODO: remove 'topic_list' and 'topic_list=' once the background migration is complete
# https://gitlab.com/gitlab-org/gitlab/-/issues/331081
def topic_list
# Return both old topics (context 'tags') and new topics (context 'topics')
tag_list_on('tags') + tag_list_on('topics')
end
def topic_list=(new_tags)
# Old topics with context 'tags' are added as new topics with context 'topics'
super(new_tags)
# Remove old topics with context 'tags'
set_tag_list_on('tags', '')
end
attr_accessor :old_path_with_namespace
attr_accessor :template_name
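For context, a brief console sketch (project path and topic names are illustrative, not from this commit) of how the overridden accessors behave while old `tags` taggings still coexist with new `topics` taggings:

```ruby
# Illustrative only: a project that still has taggings in the old 'tags'
# context alongside already-migrated 'topics' taggings.
project = Project.find_by_full_path('group/example') # hypothetical project

# Reading merges both contexts, so no topics disappear mid-migration.
project.topic_list
# => ["ruby", "ci"]

# Writing stores everything under 'topics' and clears the old 'tags' context.
project.topic_list = 'ruby, ci, docs'
project.save!
project.topic_list
# => ["ruby", "ci", "docs"]
```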

View File

@ -401,16 +401,16 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
end
def topics_to_show
project.topics.take(MAX_TOPICS_TO_SHOW) # rubocop: disable CodeReuse/ActiveRecord
project.topic_list.take(MAX_TOPICS_TO_SHOW) # rubocop: disable CodeReuse/ActiveRecord
end
def topics_not_shown
project.topics - topics_to_show
project.topic_list - topics_to_show
end
def count_of_extra_topics_not_shown
if project.topics.count > MAX_TOPICS_TO_SHOW
project.topics.count - MAX_TOPICS_TO_SHOW
if project.topic_list.count > MAX_TOPICS_TO_SHOW
project.topic_list.count - MAX_TOPICS_TO_SHOW
else
0
end

View File

@ -0,0 +1,5 @@
---
title: Migrate 'tags' to 'topics' for project in the database context
merge_request: 61237
author: Jonas Wälter @wwwjon
type: changed

View File

@ -0,0 +1,8 @@
---
name: diffs_virtual_scrolling
introduced_by_url:
rollout_issue_url:
milestone: '13.12'
type: development
group: group::code review
default_enabled: false

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddTemporaryIndexForProjectTopicsToTaggings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
INDEX_NAME = 'tmp_index_taggings_on_id_where_taggable_type_project_and_tags'
INDEX_CONDITION = "taggable_type = 'Project' AND context = 'tags'"
disable_ddl_transaction!
def up
# this index is used in 20210511095658_schedule_migrate_project_taggings_context_from_tags_to_topics
add_concurrent_index :taggings, :id, where: INDEX_CONDITION, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :taggings, INDEX_NAME
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
class ScheduleMigrateProjectTaggingsContextFromTagsToTopics < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 30_000
DELAY_INTERVAL = 2.minutes
MIGRATION = 'MigrateProjectTaggingsContextFromTagsToTopics'
disable_ddl_transaction!
class Tagging < ActiveRecord::Base
include ::EachBatch
self.table_name = 'taggings'
end
def up
queue_background_migration_jobs_by_range_at_intervals(
Tagging.where(taggable_type: 'Project', context: 'tags'),
MIGRATION,
DELAY_INTERVAL,
batch_size: BATCH_SIZE
)
end
def down
end
end
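For reference, the queued `MigrateProjectTaggingsContextFromTagsToTopics` class (added elsewhere in this commit) can also be invoked directly for a single ID range, for example from a Rails console; the range below is illustrative:

```ruby
# Illustrative manual run of one batch; the start_id/stop_id values are made up.
Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics
  .new
  .perform(1, 30_000)
```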

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class RemoveTemporaryIndexForProjectTopicsToTaggings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
INDEX_NAME = 'tmp_index_taggings_on_id_where_taggable_type_project_and_tags'
INDEX_CONDITION = "taggable_type = 'Project' AND context = 'tags'"
disable_ddl_transaction!
def up
# this index was used in 20210511095658_schedule_migrate_project_taggings_context_from_tags_to_topics
remove_concurrent_index_by_name :taggings, INDEX_NAME
end
def down
add_concurrent_index :taggings, :id, where: INDEX_CONDITION, name: INDEX_NAME
end
end

View File

@ -0,0 +1 @@
4d11cdf876786db5e827ea1a50b70e2d5b3814fd7c0b0c083ab61adad9685364

View File

@ -0,0 +1 @@
7387c23bbbc376e26c057179ebe2796be183462acb1fc509d451f0fede13ed93

View File

@ -0,0 +1 @@
ec08c18ac37f2ae7298650df58345755eada20aaa5b7ed3dfd54ee5cea88ebdd

View File

@ -563,6 +563,7 @@ smartcard
smartcards
snapshotting
Sobelow
Solargraph
Solarized
Sourcegraph
sparkline

View File

@ -77,9 +77,9 @@ and [Container Registry](../../../user/packages/container_registry/index.md).
1. Click **Commit Changes**. It automatically triggers a new pipeline. In this pipeline, the `build`
job containerizes the application and pushes the image to [GitLab Container Registry](../../../user/packages/container_registry/index.md).
![Create project](img/initial-pipeline.png)
1. Visit **Packages & Registries > Container Registry**. Make sure the application image has been
pushed.
@ -232,7 +232,7 @@ These variables are injected into the pipeline jobs and can access the ECS API.
Change a file in the project and see if it's reflected in the demo application on ECS:
1. Go to **ecs-demo** project on GitLab.
1. Open the file at **app > views > welcome > index.html.erb**.
1. Open the file at **app > views > welcome > `index.html.erb`**.
1. Click **Edit**.
1. Change the text to `You're on ECS!`.
1. Click **Commit Changes**. This automatically triggers a new pipeline. Wait until it finishes.

View File

@ -116,7 +116,7 @@ Before performing any of these tests, if you have a `k3s` instance running, make
stop it manually before running them. Otherwise, the tests might fail with the message
`failed to remove k3s cluster`.
You might need to specify the correct Agent image version that matches the `kas` image version. You can use the `GITLAB_AGENTK_VERSION` local env for this.
You might need to specify the correct Agent image version that matches the `kas` image version. You can use the `GITLAB_AGENTK_VERSION` local environment for this.
### Against `staging`
@ -124,7 +124,7 @@ You might need to specify the correct Agent image version that matches the `kas`
[this line](https://gitlab.com/gitlab-org/gitlab/-/blob/5b15540ea78298a106150c3a1d6ed26416109b9d/qa/qa/service/cluster_provider/k3s.rb#L8) and
[this line](https://gitlab.com/gitlab-org/gitlab/-/blob/5b15540ea78298a106150c3a1d6ed26416109b9d/qa/qa/service/cluster_provider/k3s.rb#L36).
We don't allow local connections on `staging` as they require an admin user.
1. Ensure you don't have an `EE_LICENSE` env var set as this would force an admin login.
1. Ensure you don't have an `EE_LICENSE` environment variable set as this would force an admin login.
1. Go to your GDK root folder and `cd gitlab/qa`.
1. Login with your user in staging and create a group to be used as sandbox.
Something like: `username-qa-sandbox`.

View File

@ -101,7 +101,7 @@ You can check for any offenses locally with `bundle exec rubocop --parallel`.
On the CI, this is automatically checked by the `static-analysis` jobs.
In addition, you can [integrate RuboCop](../developing_with_solargraph.md) into
supported IDEs using the [solargraph](https://github.com/castwide/solargraph) gem.
supported IDEs using the [Solargraph](https://github.com/castwide/solargraph) gem.
For RuboCop rules that we have not taken a decision on yet, we follow the
[Ruby Style Guide](https://github.com/rubocop-hq/ruby-style-guide),

View File

@ -8,21 +8,21 @@ info: To determine the technical writer assigned to the Stage/Group associated w
Gemfile packages [Solargraph](https://github.com/castwide/solargraph) language server for additional IntelliSense and code formatting capabilities with editors that support it.
Example configuration for solargraph can be found in [.solargraph.yml.example](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.solargraph.yml.example) file. Copy the contents of this file to `.solargraph.yml` file for language server to pick this configuration up. Since `.solargraph.yml` configuration file is ignored by Git, it's possible to adjust configuration according to your needs.
Example configuration for Solargraph can be found in [.solargraph.yml.example](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.solargraph.yml.example) file. Copy the contents of this file to `.solargraph.yml` file for language server to pick this configuration up. Since `.solargraph.yml` configuration file is ignored by Git, it's possible to adjust configuration according to your needs.
Refer to particular IDE plugin documentation on how to integrate it with solargraph language server:
Refer to particular IDE plugin documentation on how to integrate it with Solargraph language server:
- **Visual Studio Code**
- GitHub: [vscode-solargraph](https://github.com/castwide/vscode-solargraph)
- GitHub: [`vscode-solargraph`](https://github.com/castwide/vscode-solargraph)
- **Atom**
- GitHub: [atom-solargraph](https://github.com/castwide/atom-solargraph)
- GitHub: [`atom-solargraph`](https://github.com/castwide/atom-solargraph)
- **Vim**
- GitHub: [LanguageClient-neovim](https://github.com/autozimu/LanguageClient-neovim)
- GitHub: [`LanguageClient-neovim`](https://github.com/autozimu/LanguageClient-neovim)
- **Emacs**
- GitHub: [emacs-solargraph](https://github.com/guskovd/emacs-solargraph)
- GitHub: [`emacs-solargraph`](https://github.com/guskovd/emacs-solargraph)
- **Eclipse**
- GitHub: [eclipse-solargraph](https://github.com/PyvesB/eclipse-solargraph)
- GitHub: [`eclipse-solargraph`](https://github.com/PyvesB/eclipse-solargraph)

View File

@ -59,4 +59,4 @@ feature flags, and there is currently no strong suggestion to use one over the o
Historical Context: `Experimentation Module` was built iteratively with the needs that
appeared while implementing Growth sub-department experiments, while GLEX was built
with the learnings of the team and an easier to use API.
with the findings of the team and an easier to use API.

View File

@ -782,7 +782,7 @@ While the Apollo client has support for simple polling, for performance reasons,
Once the backend is set up, there are a few changes to make on the frontend.
First, get your resource Etag path from the backend. In the example of the pipelines graph, this is called the `graphql_resource_etag`. This will be used to create new headers to add to the Apollo context:
First, get your resource ETag path from the backend. In the example of the pipelines graph, this is called the `graphql_resource_etag`. This will be used to create new headers to add to the Apollo context:
```javascript
/* pipelines/components/graph/utils.js */
@ -817,7 +817,7 @@ apollo: {
},
```
Then, because Etags depend on the request being a `GET` instead of GraphQL's usual `POST`, but our default link library does not support `GET` we need to let our default Apollo client know to use a different library.
Then, because ETags depend on the request being a `GET` instead of GraphQL's usual `POST`, but our default link library does not support `GET` we need to let our default Apollo client know to use a different library.
```javascript
/* componentMountIndex.js */

View File

@ -115,7 +115,7 @@ operator](https://thoughtbot.com/blog/rubys-pessimistic-operator))
making it possible to upgrade `license_finder` or any other gem to a
version that depends on `thor 1.2`.
Simlarly, if `license_finder` had a vulnerability fixed in 6.0.1, we
Similarly, if `license_finder` had a vulnerability fixed in 6.0.1, we
should add:
```ruby
@ -127,7 +127,7 @@ still depend on a newer version of `thor`, such as `6.0.2`, but would
not be able to depend on the vulnerable version `6.0.0`.
A downgrade like that could happen if we introduced a new dependency
that also relied on thor but had its version pinned to a vulnerable
that also relied on `thor` but had its version pinned to a vulnerable
one. These changes are easy to miss in the `Gemfile.lock`. Pinning the
version would result in a conflict that would need to be solved.

View File

@ -501,7 +501,7 @@ up to run `goimports -local gitlab.com/gitlab-org` so that it's applied to every
### Analyzer Tests
The conventional Secure [analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/) has a [`convert` function](https://gitlab.com/gitlab-org/security-products/analyzers/command/-/blob/main/convert.go#L15-17) that converts SAST/DAST scanner reports into [GitLab Security Reports](https://gitlab.com/gitlab-org/security-products/security-report-schemas). When writing tests for the `convert` function, we should make use of [test fixtures](https://dave.cheney.net/2016/05/10/test-fixtures-in-go) using a `testdata` directory at the root of the analyzer's repo. The `testdata` directory should contain two subdirectories: `expect` and `reports`. The `reports` directory should contain sample SAST/DAST scanner reports which are passed into the `convert` function during the test setup. The `expect` directory should contain the expected GitLab Security Report that the `convert` returns. See Secret Detection for an [example](https://gitlab.com/gitlab-org/security-products/analyzers/secrets/-/blob/160424589ef1eed7b91b59484e019095bc7233bd/convert_test.go#L13-66).
The conventional Secure [analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/) has a [`convert` function](https://gitlab.com/gitlab-org/security-products/analyzers/command/-/blob/main/convert.go#L15-17) that converts SAST/DAST scanner reports into [GitLab Security Reports](https://gitlab.com/gitlab-org/security-products/security-report-schemas). When writing tests for the `convert` function, we should make use of [test fixtures](https://dave.cheney.net/2016/05/10/test-fixtures-in-go) using a `testdata` directory at the root of the analyzer's repository. The `testdata` directory should contain two subdirectories: `expect` and `reports`. The `reports` directory should contain sample SAST/DAST scanner reports which are passed into the `convert` function during the test setup. The `expect` directory should contain the expected GitLab Security Report that the `convert` returns. See Secret Detection for an [example](https://gitlab.com/gitlab-org/security-products/analyzers/secrets/-/blob/160424589ef1eed7b91b59484e019095bc7233bd/convert_test.go#L13-66).
If the scanner report is small, less than 35 lines, then feel free to [inline the report](https://gitlab.com/gitlab-org/security-products/analyzers/sobelow/-/blob/8bd2428a/convert/convert_test.go#L13-77) rather than use a `testdata` directory.

View File

@ -25,7 +25,7 @@ The dashboards for stage groups are at a very early stage. All contributions are
Read more about how we are using error budgets overall in our
[handbook](https://about.gitlab.com/handbook/engineering/error-budgets/).
By default, the first row of panels on the dashbhoard will show the [error
By default, the first row of panels on the dashboard will show the [error
budget for the stage
group](https://about.gitlab.com/handbook/engineering/error-budgets/#budget-spend-by-stage-group). This
row shows how the features owned by

View File

@ -1302,7 +1302,7 @@ A good guideline to follow: the more complex the component you may want to steer
- To capture large data structures just to have something
- To just have some kind of test written
- To capture highly volatile ui elements without stubbing them (Think of GitLab UI version updates)
- To capture highly volatile UI elements without stubbing them (Think of GitLab UI version updates)
---

View File

@ -48,7 +48,7 @@ Product Intelligence files.
[Metrics Dictionary](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/development/usage_ping/dictionary.md) if it is needed.
- Add a changelog [according to guidelines](../changelog.md).
##### When adding or modifiying Snowplow events
##### When adding or modifying Snowplow events
- For frontend events, when relevant, add a screenshot of the event in
the [testing tool](../snowplow/index.md#developing-and-testing-snowplow) used.
@ -81,7 +81,7 @@ Any of the Product Intelligence engineers can be assigned for the Product Intell
- Check if a [feature flag is needed](index.md#recommendations).
- For tracking with Snowplow:
- Check that the [event taxonomy](../snowplow/index.md#structured-event-taxonomy) is correct.
- Check the [usage recomendations](../snowplow/index.md#usage-recommendations).
- Check the [usage recommendations](../snowplow/index.md#usage-recommendations).
- Metrics YAML definitions:
- Check the metric `description`.
- Check the metrics `key_path`.

View File

@ -378,7 +378,7 @@ low may lead the reindexing process to take a very long time to complete.
The best value for this will depend on your cluster size, whether you're willing
to accept some degraded search performance during reindexing, and how important
it is for the reindex to finish quickly and unpause indexing.
it is for the reindex to finish quickly and resume indexing.
### Mark the most recent reindex job as failed and resume the indexing

View File

@ -148,7 +148,7 @@ If you upgrade your GitLab instance while the GitLab Runner is processing jobs,
As for the artifacts, the GitLab Runner will attempt to upload them three times, after which the job will eventually fail.
To address the above two scenario's, it is adviced to do the following prior to upgrading:
To address the above two scenario's, it is advised to do the following prior to upgrading:
1. Plan your maintenance.
1. Pause your runners.

View File

@ -50,7 +50,7 @@ The following table shows the supported metrics, at which level they are support
| Metric | Level | API version | Chart (UI) version | Comments |
| --------------- | ----------- | --------------- | ---------- | ------- |
| `deployment_frequency` | Project-level | [13.7+](../../api/dora/metrics.md) | [13.8+](#deployment-frequency-charts) | The [old API endopint](../../api/dora4_project_analytics.md) was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/323713) in 13.10. |
| `deployment_frequency` | Project-level | [13.7+](../../api/dora/metrics.md) | [13.8+](#deployment-frequency-charts) | The [old API endpoint](../../api/dora4_project_analytics.md) was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/323713) in 13.10. |
| `deployment_frequency` | Group-level | [13.10+](../../api/dora/metrics.md) | To be supported | |
| `lead_time_for_changes` | Project-level | [13.10+](../../api/dora/metrics.md) | [13.11+](#lead-time-charts) | Unit in seconds. Aggregation method is median. |
| `lead_time_for_changes` | Group-level | [13.10+](../../api/dora/metrics.md) | To be supported | Unit in seconds. Aggregation method is median. |

View File

@ -324,7 +324,7 @@ To allowlist specific vulnerabilities, follow these steps:
1. Set `GIT_STRATEGY: fetch` in your `.gitlab-ci.yml` file by following the instructions in
[overriding the container scanning template](#overriding-the-container-scanning-template).
1. Define the allowlisted vulnerabilities in a YAML file named `vulnerability-allowlist.yml`. This must use
the format described in [vulnerability-allowlist.yml data format](#vulnerability-allowlistyml-data-format).
the format described in [`vulnerability-allowlist.yml` data format](#vulnerability-allowlistyml-data-format).
1. Add the `vulnerability-allowlist.yml` file to the root folder of your project's Git repository.
#### vulnerability-allowlist.yml data format
@ -365,9 +365,9 @@ This example excludes from `gl-container-scanning-report.json`:
You can specify container image in multiple ways:
- as image name only (ie. `centos`).
- as full image name with registry hostname (ie. `your.private.registry:5000/centos`).
- as full image name with registry hostname and sha256 label (ie. `registry.gitlab.com/gitlab-org/security-products/dast/webgoat-8.0@sha256`).
- as image name only (such as `centos`).
- as full image name with registry hostname (such as `your.private.registry:5000/centos`).
- as full image name with registry hostname and sha256 label (such as `registry.gitlab.com/gitlab-org/security-products/dast/webgoat-8.0@sha256`).
NOTE:
The string after CVE ID (`cups` and `libxml2` in the previous example) is an optional comment format. It has **no impact** on the handling of vulnerabilities. You can include comments to describe the vulnerability.

View File

@ -594,7 +594,7 @@ can be added, removed, and modified by creating a custom configuration.
- Application Information Check
- Cleartext Authentication Check
- FrameworkDebugModeCheck
- Html Injection Check
- HTML Injection Check
- Insecure Http Methods Check
- JSON Hijacking Check
- JSON Injection Check
@ -602,16 +602,16 @@ can be added, removed, and modified by creating a custom configuration.
- Session Cookie Check
- SQL Injection Check
- Token Check
- Xml Injection Check
- XML Injection Check
##### Full
- Application Information Check
- Cleartext AuthenticationCheck
- Cors Check
- Dns Rebinding Check
- CORS Check
- DNS Rebinding Check
- Framework Debug Mode Check
- Html Injection Check
- HTML Injection Check
- Insecure Http Methods Check
- JSON Hijacking Check
- JSON Injection Check
@ -620,9 +620,9 @@ can be added, removed, and modified by creating a custom configuration.
- Sensitive Information Check
- Session Cookie Check
- SQL Injection Check
- Tls Configuration Check
- TLS Configuration Check
- Token Check
- Xml Injection Check
- XML Injection Check
### Available CI/CD variables

View File

@ -510,7 +510,7 @@ ensure that it can reach your private repository. Here is an example configurati
## Hosting a copy of the gemnasium_db advisory database
The [gemnasium_db](https://gitlab.com/gitlab-org/security-products/gemnasium-db) Git repository is
The [`gemnasium_db`](https://gitlab.com/gitlab-org/security-products/gemnasium-db) Git repository is
used by `gemnasium`, `gemnasium-maven`, and `gemnasium-python` as the source of vulnerability data.
This repository updates at scan time to fetch the latest advisories. However, due to a restricted
networking environment, running this update is sometimes not possible. In this case, a user can do

View File

@ -710,7 +710,7 @@ documentation for instructions.
## Running SAST in SELinux
By default SAST analyzers are supported in GitLab instances hosted on SELinux. Adding a `before_script` in an [overriden SAST job](#overriding-sast-jobs) may not work as runners hosted on SELinux have restricted permissions.
By default SAST analyzers are supported in GitLab instances hosted on SELinux. Adding a `before_script` in an [overridden SAST job](#overriding-sast-jobs) may not work as runners hosted on SELinux have restricted permissions.
## Troubleshooting

View File

@ -172,7 +172,7 @@ the Agent in subsequent steps. You can create an Agent record with GraphQL:
WARNING:
GraphQL only displays the token and ids **one time** after creating it. Make sure to write down the `secret`, `clusterAgentId`, and `clusterAgentTokenId`; you'll need them later.
If you are new to using the GitLab GraphQL API, refer to the
[Getting started with the GraphQL API page](../../../api/graphql/getting_started.md),
or the [GraphQL Explorer](https://gitlab.com/-/graphql-explorer).
@ -562,7 +562,7 @@ is unknown to the agent. One approach to fixing it is to present the CA certific
via a Kubernetes `configmap` and mount the file in the agent `/etc/ssl/certs` directory from where it
will be picked up automatically.
For example, if your internal CA certifciate is `myCA.pem`:
For example, if your internal CA certificate is `myCA.pem`:
```plaintext
kubectl -n gitlab-kubernetes-agent create configmap ca-pemstore --from-file=myCA.pem
@ -632,7 +632,7 @@ Alternatively, you can mount the certificate file at a different location and in
mutation deleteAgent {
clusterAgentDelete(input: { id: "<cluster-agent-id>" } ) {
errors
}
}
}
mutation deleteToken {
@ -645,7 +645,7 @@ Alternatively, you can mount the certificate file at a different location and in
1. Verify whether the removal occurred successfully. If the output in the Pod logs includes `unauthenticated`, it means that the agent was successfully removed:
```json
{"level":"warn","time":"2021-04-29T23:44:07.598Z","msg":"GetConfiguration.Recv failed","error":"rpc error:
{"level":"warn","time":"2021-04-29T23:44:07.598Z","msg":"GetConfiguration.Recv failed","error":"rpc error:
code = Unauthenticated desc = unauthenticated"}
```

View File

@ -114,7 +114,7 @@ To disable it:
Feature.disable(:group_devops_adoption)
```
To reenable it:
To re-enable it:
```ruby
Feature.enable(:group_devops_adoption)

View File

@ -69,22 +69,6 @@ Example response:
}
```
Example request using a deploy token:
```shell
curl --header "DEPLOY-TOKEN: <deploy_token>" \
--upload-file path/to/file.txt \
"https://gitlab.example.com/api/v4/projects/24/packages/generic/my_package/0.0.1/file.txt?status=hidden"
```
Example response:
```json
{
"message":"201 Created"
}
```
## Download package file
Download a package file.

View File

@ -86,7 +86,7 @@ Put the following code in the file:
service: gitlab-example
provider:
name: aws
runtime: nodejs10.x
runtime: nodejs14.x
functions:
hello:

View File

@ -167,7 +167,7 @@ If the key is **publicly accessible**, it will be removed from the project, but
If the key is **privately accessible** and only in use by this project, it will be deleted.
If the key is **privately accessible** and in use by other projects, it will be removed from the project, but still available under **Privately accesible deploy keys**.
If the key is **privately accessible** and in use by other projects, it will be removed from the project, but still available under **Privately accessible deploy keys**.
## Troubleshooting

View File

@ -130,20 +130,12 @@ To pull packages in the GitLab package registry, you must:
1. For the [package type of your choice](../../packages/index.md), follow the
authentication instructions for deploy tokens.
Example request publishing a generic package using a deploy token:
Example request publishing a NuGet package using a deploy token:
```shell
curl --header "DEPLOY-TOKEN: <deploy_token>" \
--upload-file path/to/file.txt \
"https://gitlab.example.com/api/v4/projects/24/packages/generic/my_package/0.0.1/file.txt?status=hidden"
```
nuget source Add -Name GitLab -Source "https://gitlab.example.com/api/v4/projects/10/packages/nuget/index.json" -UserName deploy-token-username -Password 12345678asdf
Example response:
```json
{
"message":"201 Created"
}
nuget push mypkg.nupkg -Source GitLab
```
### Push or upload packages

View File

@ -15,7 +15,7 @@ Hangouts).
## How it works
To enable this integration, first you need to create a webhook for the room in
Google Chat where you want to receive the nofications from your project.
Google Chat where you want to receive the notifications from your project.
After that, enable the integration in GitLab and choose the events you want to
be notified about in your Google Chat room.

View File

@ -103,7 +103,7 @@ Sometimes when you have hundreds of branches you may want a more flexible matchi
![Before swap revisions](img/swap_revisions_before_v13_12.png)
The Swap revisions feature allows you to swap the Source and Target revisions. When the Swap revisions button is clicked, the selected revisions for Source and Targed will be swapped.
The Swap revisions feature allows you to swap the Source and Target revisions. When the Swap revisions button is clicked, the selected revisions for Source and Target will be swapped.
![After swap revisions](img/swap_revisions_after_v13_12.png)

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# The class to migrate the context of project taggings from `tags` to `topics`
class MigrateProjectTaggingsContextFromTagsToTopics
# Temporary AR table for taggings
class Tagging < ActiveRecord::Base
include EachBatch
self.table_name = 'taggings'
end
def perform(start_id, stop_id)
Tagging.where(taggable_type: 'Project', context: 'tags', id: start_id..stop_id).each_batch(of: 500) do |relation|
relation.update_all(context: 'topics')
end
end
end
end
end

View File

@ -153,6 +153,7 @@ excluded_attributes:
- :bfg_object_map
- :detected_repository_languages
- :tag_list
- :topic_list
- :mirror_user_id
- :mirror_trigger_builds
- :only_mirror_protected_branches

View File

@ -152,6 +152,7 @@
"prosemirror-model": "^1.13.3",
"raphael": "^2.2.7",
"raw-loader": "^4.0.2",
"scrollparent": "^2.0.1",
"select2": "3.5.2-browserify",
"smooshpack": "^0.0.62",
"sortablejs": "^1.10.2",
@ -171,6 +172,8 @@
"vue": "^2.6.12",
"vue-apollo": "^3.0.3",
"vue-loader": "^15.9.6",
"vue-observe-visibility": "^1.0.0",
"vue-resize": "^1.0.1",
"vue-router": "3.4.9",
"vue-template-compiler": "^2.6.12",
"vue-virtual-scroll-list": "^1.4.7",

View File

@ -7,15 +7,12 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
let_it_be(:group) { create(:group, name: 'Owned') }
let_it_be(:project) { create(:project, :repository, namespace: group) }
let(:user) { create(:user, email: 'user@example.com') }
let(:group_invite) { group.group_members.invite.last }
before do
stub_application_setting(require_admin_approval_after_user_signup: false)
project.add_maintainer(owner)
group.add_owner(owner)
group.add_developer('user@example.com', owner)
group_invite.generate_invite_token!
end
def confirm_email(new_user)
@ -45,45 +42,128 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
click_button 'Get started!'
end
context 'when signed out' do
context 'when inviting a registered user' do
let(:invite_email) { 'user@example.com' }
before do
visit invite_path(group_invite.raw_invite_token)
group.add_developer(invite_email, owner)
group_invite.generate_invite_token!
end
it 'renders sign up page with sign up notice' do
expect(current_path).to eq(new_user_registration_path)
expect(page).to have_content('To accept this invitation, create an account or sign in')
end
context 'when signed out' do
context 'when analyzing the redirects and forms from invite link click' do
before do
visit invite_path(group_invite.raw_invite_token)
end
it 'pre-fills the "Username or email" field on the sign in box with the invite_email from the invite' do
click_link 'Sign in'
it 'renders sign up page with sign up notice' do
expect(current_path).to eq(new_user_registration_path)
expect(page).to have_content('To accept this invitation, create an account or sign in')
end
expect(find_field('Username or email').value).to eq(group_invite.invite_email)
end
it 'pre-fills the "Username or email" field on the sign in box with the invite_email from the invite' do
click_link 'Sign in'
it 'pre-fills the Email field on the sign up box with the invite_email from the invite' do
expect(find_field('Email').value).to eq(group_invite.invite_email)
end
expect(find_field('Username or email').value).to eq(group_invite.invite_email)
end
it 'sign in, grants access and redirects to group activity page' do
click_link 'Sign in'
it 'pre-fills the Email field on the sign up box with the invite_email from the invite' do
expect(find_field('Email').value).to eq(group_invite.invite_email)
end
end
fill_in_sign_in_form(user)
context 'when invite is sent before account is created - ldap or social sign in for manual acceptance edge case' do
let(:user) { create(:user, email: 'user@example.com') }
expect(current_path).to eq(activity_group_path(group))
end
end
context 'when invite clicked and not signed in' do
before do
visit invite_path(group_invite.raw_invite_token)
end
context 'when signed in as an existing member' do
before do
sign_in(owner)
end
it 'sign in, grants access and redirects to group activity page' do
click_link 'Sign in'
it 'shows message user already a member' do
visit invite_path(group_invite.raw_invite_token)
fill_in_sign_in_form(user)
expect(page).to have_link(owner.name, href: user_url(owner))
expect(page).to have_content('However, you are already a member of this group.')
expect(current_path).to eq(activity_group_path(group))
end
end
context 'when signed in and an invite link is clicked' do
context 'when an invite email is a secondary email for the user' do
let(:invite_email) { 'user_secondary@example.com' }
before do
sign_in(user)
visit invite_path(group_invite.raw_invite_token)
end
it 'sends user to the invite url and allows them to decline' do
expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
expect(page).to have_content("Note that this invitation was sent to #{invite_email}")
expect(page).to have_content("but you are signed in as #{user.to_reference} with email #{user.email}")
click_link('Decline')
expect(page).to have_content('You have declined the invitation')
expect(current_path).to eq(dashboard_projects_path)
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
it 'sends user to the invite url and allows them to accept' do
expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
expect(page).to have_content("Note that this invitation was sent to #{invite_email}")
expect(page).to have_content("but you are signed in as #{user.to_reference} with email #{user.email}")
click_link('Accept invitation')
expect(page).to have_content('You have been granted')
expect(current_path).to eq(activity_group_path(group))
end
end
context 'when user is an existing member' do
before do
sign_in(owner)
visit invite_path(group_invite.raw_invite_token)
end
it 'shows message user already a member' do
expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
expect(page).to have_link(owner.name, href: user_url(owner))
expect(page).to have_content('However, you are already a member of this group.')
end
end
end
context 'when declining the invitation from invitation reminder email' do
context 'when signed in' do
before do
sign_in(user)
visit decline_invite_path(group_invite.raw_invite_token)
end
it 'declines application and redirects to dashboard' do
expect(current_path).to eq(dashboard_projects_path)
expect(page).to have_content('You have declined the invitation to join group Owned.')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
context 'when signed out with signup onboarding' do
before do
visit decline_invite_path(group_invite.raw_invite_token)
end
it 'declines application and redirects to sign in page' do
expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
expect(page).not_to have_content('You have declined the invitation to join')
expect(page).to have_content('You successfully declined the invitation')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
end
end
end
end
@ -243,63 +323,13 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
end
context 'when declining the invitation' do
context 'as an existing user' do
let(:group_invite) { create(:group_member, user: user, group: group, created_by: owner) }
context 'when declining the invitation from invitation reminder email' do
it 'declines application and shows a decline page' do
visit decline_invite_path(group_invite.raw_invite_token)
context 'when signed in' do
before do
sign_in(user)
visit decline_invite_path(group_invite.raw_invite_token)
end
it 'declines application and redirects to dashboard' do
expect(current_path).to eq(dashboard_projects_path)
expect(page).to have_content('You have declined the invitation to join group Owned.')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
context 'when signed out' do
before do
visit decline_invite_path(group_invite.raw_invite_token)
end
it 'declines application and redirects to sign in page' do
expect(current_path).to eq(new_user_session_path)
expect(page).to have_content('You have declined the invitation to join group Owned.')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
end
context 'as a non-existing user' do
before do
visit decline_invite_path(group_invite.raw_invite_token)
end
it 'declines application and shows a decline page' do
expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
expect(page).to have_content('You successfully declined the invitation')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
end
context 'when accepting the invitation as an existing user' do
before do
sign_in(user)
visit invite_path(group_invite.raw_invite_token)
end
it 'grants access and redirects to the group activity page' do
expect(group.users.include?(user)).to be false
page.click_link 'Accept invitation'
expect(current_path).to eq(activity_group_path(group))
expect(page).to have_content('You have been granted Owner access to group Owned.')
expect(group.users.include?(user)).to be true
expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
expect(page).to have_content('You successfully declined the invitation')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
end

View File

@ -139,7 +139,7 @@ RSpec.describe ProjectsFinder do
describe 'filter by tags' do
before do
public_project.tag_list.add('foo')
public_project.tag_list = 'foo'
public_project.save!
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics, schema: 20210511095658 do
it 'correctly migrates project taggings context from tags to topics' do
taggings = table(:taggings)
project_old_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'tags')
project_new_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'topics')
project_other_context_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'other')
project_old_tagging_2 = taggings.create!(taggable_type: 'Project', context: 'tags')
project_old_tagging_3 = taggings.create!(taggable_type: 'Project', context: 'tags')
subject.perform(project_old_tagging_1.id, project_old_tagging_2.id)
project_old_tagging_1.reload
project_new_tagging_1.reload
project_other_context_tagging_1.reload
project_old_tagging_2.reload
project_old_tagging_3.reload
expect(project_old_tagging_1.context).to eq('topics')
expect(project_new_tagging_1.context).to eq('topics')
expect(project_other_context_tagging_1.context).to eq('other')
expect(project_old_tagging_2.context).to eq('topics')
expect(project_old_tagging_3.context).to eq('tags')
end
end

View File

@ -343,8 +343,9 @@ project:
- external_approval_rules
- taggings
- base_tags
- tag_taggings
- tags
- topic_taggings
- topics
- chat_services
- cluster
- clusters

View File

@ -6964,6 +6964,55 @@ RSpec.describe Project, factory_default: :keep do
end
end
describe 'topics' do
let_it_be(:project) { create(:project, tag_list: 'topic1, topic2, topic3') }
it 'topic_list returns correct string array' do
expect(project.topic_list).to match_array(%w[topic1 topic2 topic3])
end
it 'topics returns correct tag records' do
expect(project.topics.first.class.name).to eq('ActsAsTaggableOn::Tag')
expect(project.topics.map(&:name)).to match_array(%w[topic1 topic2 topic3])
end
context 'aliases' do
it 'tag_list returns correct string array' do
expect(project.tag_list).to match_array(%w[topic1 topic2 topic3])
end
it 'tags returns correct tag records' do
expect(project.tags.first.class.name).to eq('ActsAsTaggableOn::Tag')
expect(project.tags.map(&:name)).to match_array(%w[topic1 topic2 topic3])
end
end
context 'intermediate state during background migration' do
before do
project.taggings.first.update!(context: 'tags')
project.instance_variable_set("@tag_list", nil)
project.reload
end
it 'tag_list returns string array including old and new topics' do
expect(project.tag_list).to match_array(%w[topic1 topic2 topic3])
end
it 'tags returns old and new tag records' do
expect(project.tags.first.class.name).to eq('ActsAsTaggableOn::Tag')
expect(project.tags.map(&:name)).to match_array(%w[topic1 topic2 topic3])
expect(project.taggings.map(&:context)).to match_array(%w[tags topics topics])
end
it 'update tag_list adds new topics and removes old topics' do
project.update!(tag_list: 'topic1, topic2, topic3, topic4')
expect(project.tags.map(&:name)).to match_array(%w[topic1 topic2 topic3 topic4])
expect(project.taggings.map(&:context)).to match_array(%w[topics topics topics topics])
end
end
end
def finish_job(export_job)
export_job.start
export_job.finish

View File

@ -41,6 +41,7 @@ itself: # project
- reset_approvals_on_push
- runners_token_encrypted
- storage_version
- topic_list
- updated_at
remapped_attributes:
avatar: avatar_url
@ -67,6 +68,7 @@ itself: # project
- readme_url
- shared_with_groups
- ssh_url_to_repo
- tag_list
- web_url
build_auto_devops: # auto_devops

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Groups::AutocompleteSourcesController do
RSpec.describe 'groups autocomplete' do
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:group) { create(:group, :private) }
@ -35,9 +35,8 @@ RSpec.describe Groups::AutocompleteSourcesController do
with_them do
it 'returns the correct response', :aggregate_failures do
issues = Array(expected).flat_map { |sym| public_send(sym) }
params = { group_id: group, issue_types: issue_types }.compact
get :issues, params: params
get issues_group_autocomplete_sources_path(group, issue_types: issue_types)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an(Array)
@ -57,7 +56,7 @@ RSpec.describe Groups::AutocompleteSourcesController do
create(:milestone, group: sub_group)
group_milestone = create(:milestone, group: group)
get :milestones, params: { group_id: group }
get milestones_group_autocomplete_sources_path(group)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq(1)

View File

@ -268,6 +268,7 @@ RSpec.configure do |config|
stub_feature_flags(file_identifier_hash: false)
stub_feature_flags(unified_diff_components: false)
stub_feature_flags(diffs_virtual_scrolling: false)
# The following `vue_issues_list`/`vue_issuables_list` stubs can be removed
# once the Vue issues page has feature parity with the current Haml page

View File

@ -0,0 +1,40 @@
{
"name": "vue-virtual-scroller",
"description": "Smooth scrolling for any amount of data",
"version": "1.0.10",
"author": {
"name": "Guillaume Chau",
"email": "guillaume.b.chau@gmail.com"
},
"keywords": [
"vue",
"vuejs",
"plugin"
],
"license": "MIT",
"main": "src/index.js",
"scripts": {},
"repository": {
"type": "git",
"url": "git+https://github.com/Akryum/vue-virtual-scroller.git"
},
"bugs": {
"url": "https://github.com/Akryum/vue-virtual-scroller/issues"
},
"homepage": "https://github.com/Akryum/vue-virtual-scroller#readme",
"dependencies": {
"scrollparent": "^2.0.1",
"vue-observe-visibility": "^0.4.4",
"vue-resize": "^0.4.5"
},
"peerDependencies": {
"vue": "^2.6.11"
},
"devDependencies": {
},
"browserslist": [
"> 1%",
"last 2 versions",
"not ie <= 8"
]
}

View File

@ -0,0 +1,212 @@
<template>
<RecycleScroller
ref="scroller"
:items="itemsWithSize"
:min-item-size="minItemSize"
:direction="direction"
key-field="id"
v-bind="$attrs"
@resize="onScrollerResize"
@visible="onScrollerVisible"
v-on="listeners"
>
<template slot-scope="{ item: itemWithSize, index, active }">
<slot
v-bind="{
item: itemWithSize.item,
index,
active,
itemWithSize
}"
/>
</template>
<template slot="before">
<slot name="before" />
</template>
<template slot="after">
<slot name="after" />
</template>
</RecycleScroller>
</template>
<script>
import RecycleScroller from './RecycleScroller.vue'
import { props, simpleArray } from './common'
export default {
name: 'DynamicScroller',
components: {
RecycleScroller,
},
inheritAttrs: false,
provide () {
if (typeof ResizeObserver !== 'undefined') {
this.$_resizeObserver = new ResizeObserver(entries => {
for (const entry of entries) {
if (entry.target) {
const event = new CustomEvent(
'resize',
{
detail: {
contentRect: entry.contentRect,
},
},
)
entry.target.dispatchEvent(event)
}
}
})
}
return {
vscrollData: this.vscrollData,
vscrollParent: this,
vscrollResizeObserver: this.$_resizeObserver,
}
},
props: {
...props,
minItemSize: {
type: [Number, String],
required: true,
},
},
data () {
return {
vscrollData: {
active: true,
sizes: {},
validSizes: {},
keyField: this.keyField,
simpleArray: false,
},
}
},
computed: {
simpleArray,
itemsWithSize () {
const result = []
const { items, keyField, simpleArray } = this
const sizes = this.vscrollData.sizes
for (let i = 0; i < items.length; i++) {
const item = items[i]
const id = simpleArray ? i : item[keyField]
let size = sizes[id]
if (typeof size === 'undefined' && !this.$_undefinedMap[id]) {
size = 0
}
result.push({
item,
id,
size,
})
}
return result
},
listeners () {
const listeners = {}
for (const key in this.$listeners) {
if (key !== 'resize' && key !== 'visible') {
listeners[key] = this.$listeners[key]
}
}
return listeners
},
},
watch: {
items () {
this.forceUpdate(false)
},
simpleArray: {
handler (value) {
this.vscrollData.simpleArray = value
},
immediate: true,
},
direction (value) {
this.forceUpdate(true)
},
},
created () {
this.$_updates = []
this.$_undefinedSizes = 0
this.$_undefinedMap = {}
},
activated () {
this.vscrollData.active = true
},
deactivated () {
this.vscrollData.active = false
},
methods: {
onScrollerResize () {
const scroller = this.$refs.scroller
if (scroller) {
this.forceUpdate()
}
this.$emit('resize')
},
onScrollerVisible () {
this.$emit('vscroll:update', { force: false })
this.$emit('visible')
},
forceUpdate (clear = true) {
if (clear || this.simpleArray) {
this.vscrollData.validSizes = {}
}
this.$emit('vscroll:update', { force: true })
},
scrollToItem (index) {
const scroller = this.$refs.scroller
if (scroller) scroller.scrollToItem(index)
},
getItemSize (item, index = undefined) {
const id = this.simpleArray ? (index != null ? index : this.items.indexOf(item)) : item[this.keyField]
return this.vscrollData.sizes[id] || 0
},
scrollToBottom () {
if (this.$_scrollingToBottom) return
this.$_scrollingToBottom = true
const el = this.$el
// Item is inserted to the DOM
this.$nextTick(() => {
el.scrollTop = el.scrollHeight + 5000
// Item sizes are computed
const cb = () => {
el.scrollTop = el.scrollHeight + 5000
requestAnimationFrame(() => {
el.scrollTop = el.scrollHeight + 5000
if (this.$_undefinedSizes === 0) {
this.$_scrollingToBottom = false
} else {
requestAnimationFrame(cb)
}
})
}
requestAnimationFrame(cb)
})
},
},
}
</script>

View File

@ -0,0 +1,218 @@
<script>
export default {
name: 'DynamicScrollerItem',
inject: [
'vscrollData',
'vscrollParent',
'vscrollResizeObserver',
],
props: {
// eslint-disable-next-line vue/require-prop-types
item: {
required: true,
},
watchData: {
type: Boolean,
default: false,
},
/**
* Indicates if the view is actively used to display an item.
*/
active: {
type: Boolean,
required: true,
},
index: {
type: Number,
default: undefined,
},
sizeDependencies: {
type: [Array, Object],
default: null,
},
emitResize: {
type: Boolean,
default: false,
},
tag: {
type: String,
default: 'div',
},
},
computed: {
id () {
return this.vscrollData.simpleArray ? this.index : this.item[this.vscrollData.keyField]
},
size () {
return (this.vscrollData.validSizes[this.id] && this.vscrollData.sizes[this.id]) || 0
},
finalActive () {
return this.active && this.vscrollData.active
},
},
watch: {
watchData: 'updateWatchData',
id () {
if (!this.size) {
this.onDataUpdate()
}
},
finalActive (value) {
if (!this.size) {
if (value) {
if (!this.vscrollParent.$_undefinedMap[this.id]) {
this.vscrollParent.$_undefinedSizes++
this.vscrollParent.$_undefinedMap[this.id] = true
}
} else {
if (this.vscrollParent.$_undefinedMap[this.id]) {
this.vscrollParent.$_undefinedSizes--
this.vscrollParent.$_undefinedMap[this.id] = false
}
}
}
if (this.vscrollResizeObserver) {
if (value) {
this.observeSize()
} else {
this.unobserveSize()
}
} else if (value && this.$_pendingVScrollUpdate === this.id) {
this.updateSize()
}
},
},
created () {
if (this.$isServer) return
this.$_forceNextVScrollUpdate = null
this.updateWatchData()
if (!this.vscrollResizeObserver) {
for (const k in this.sizeDependencies) {
this.$watch(() => this.sizeDependencies[k], this.onDataUpdate)
}
this.vscrollParent.$on('vscroll:update', this.onVscrollUpdate)
this.vscrollParent.$on('vscroll:update-size', this.onVscrollUpdateSize)
}
},
mounted () {
if (this.vscrollData.active) {
this.updateSize()
this.observeSize()
}
},
beforeDestroy () {
this.vscrollParent.$off('vscroll:update', this.onVscrollUpdate)
this.vscrollParent.$off('vscroll:update-size', this.onVscrollUpdateSize)
this.unobserveSize()
},
methods: {
updateSize () {
if (this.finalActive) {
if (this.$_pendingSizeUpdate !== this.id) {
this.$_pendingSizeUpdate = this.id
this.$_forceNextVScrollUpdate = null
this.$_pendingVScrollUpdate = null
this.computeSize(this.id)
}
} else {
this.$_forceNextVScrollUpdate = this.id
}
},
updateWatchData () {
if (this.watchData) {
this.$_watchData = this.$watch('data', () => {
this.onDataUpdate()
}, {
deep: true,
})
} else if (this.$_watchData) {
this.$_watchData()
this.$_watchData = null
}
},
onVscrollUpdate ({ force }) {
// If not active, schedule a size update when it becomes active
if (!this.finalActive && force) {
this.$_pendingVScrollUpdate = this.id
}
if (this.$_forceNextVScrollUpdate === this.id || force || !this.size) {
this.updateSize()
}
},
onDataUpdate () {
this.updateSize()
},
computeSize (id) {
this.$nextTick(() => {
if (this.id === id) {
const width = this.$el.offsetWidth
const height = this.$el.offsetHeight
this.applySize(width, height)
}
this.$_pendingSizeUpdate = null
})
},
applySize (width, height) {
const size = Math.round(this.vscrollParent.direction === 'vertical' ? height : width)
if (size && this.size !== size) {
if (this.vscrollParent.$_undefinedMap[this.id]) {
this.vscrollParent.$_undefinedSizes--
this.vscrollParent.$_undefinedMap[this.id] = undefined
}
this.$set(this.vscrollData.sizes, this.id, size)
this.$set(this.vscrollData.validSizes, this.id, true)
if (this.emitResize) this.$emit('resize', this.id)
}
},
observeSize () {
if (!this.vscrollResizeObserver) return
this.vscrollResizeObserver.observe(this.$el.parentNode)
this.$el.parentNode.addEventListener('resize', this.onResize)
},
unobserveSize () {
if (!this.vscrollResizeObserver) return
this.vscrollResizeObserver.unobserve(this.$el.parentNode)
this.$el.parentNode.removeEventListener('resize', this.onResize)
},
onResize (event) {
const { width, height } = event.detail.contentRect
this.applySize(width, height)
},
},
render (h) {
return h(this.tag, this.$slots.default)
},
}
</script>

View File

@ -0,0 +1,657 @@
<template>
<div
v-observe-visibility="handleVisibilityChange"
class="vue-recycle-scroller"
:class="{
ready,
'page-mode': pageMode,
[`direction-${direction}`]: true,
}"
@scroll.passive="handleScroll"
>
<div
v-if="$slots.before"
class="vue-recycle-scroller__slot"
>
<slot
name="before"
/>
</div>
<div
ref="wrapper"
:style="{ [direction === 'vertical' ? 'minHeight' : 'minWidth']: totalSize + 'px' }"
class="vue-recycle-scroller__item-wrapper"
>
<div
v-for="view of pool"
:key="view.nr.id"
:style="ready ? {
transform: useTransform ? `translate${direction === 'vertical' ? 'Y' : 'X'}(${view.position}px)` : null,
top: !useTransform && direction === 'vertical' ? `${view.position}px` : null,
left: !useTransform && direction !== 'vertical' ? `${view.position}px` : null,
} : null"
class="vue-recycle-scroller__item-view"
:class="{ hover: hoverKey === view.nr.key }"
@mouseenter="hoverKey = view.nr.key"
@mouseleave="hoverKey = null"
>
<slot
:item="view.item"
:index="view.nr.index"
:active="view.nr.used"
/>
</div>
</div>
<div
v-if="$slots.after"
class="vue-recycle-scroller__slot"
>
<slot
name="after"
/>
</div>
<ResizeObserver @notify="handleResize" />
</div>
</template>
<script>
import { ResizeObserver } from 'vue-resize'
import { ObserveVisibility } from 'vue-observe-visibility'
import ScrollParent from 'scrollparent'
import config from '../config'
import { props, simpleArray } from './common'
import { supportsPassive } from '../utils'
let uid = 0
export default {
name: 'RecycleScroller',
components: {
ResizeObserver,
},
directives: {
ObserveVisibility,
},
props: {
...props,
itemSize: {
type: Number,
default: null,
},
minItemSize: {
type: [Number, String],
default: null,
},
sizeField: {
type: String,
default: 'size',
},
typeField: {
type: String,
default: 'type',
},
buffer: {
type: Number,
default: 200,
},
pageMode: {
type: Boolean,
default: false,
},
prerender: {
type: Number,
default: 0,
},
emitUpdate: {
type: Boolean,
default: false,
},
useTransform: {
type: Boolean,
default: true,
}
},
data () {
return {
pool: [],
totalSize: 0,
ready: false,
hoverKey: null,
}
},
computed: {
sizes () {
if (this.itemSize === null) {
const sizes = {
'-1': { accumulator: 0 },
}
const items = this.items
const field = this.sizeField
const minItemSize = this.minItemSize
let computedMinSize = 10000
let accumulator = 0
let current
for (let i = 0, l = items.length; i < l; i++) {
current = items[i][field] || minItemSize
if (current < computedMinSize) {
computedMinSize = current
}
accumulator += current
sizes[i] = { accumulator, size: current }
}
// eslint-disable-next-line
this.$_computedMinItemSize = computedMinSize
return sizes
}
return []
},
simpleArray,
},
watch: {
items () {
this.updateVisibleItems(true)
},
pageMode () {
this.applyPageMode()
this.updateVisibleItems(false)
},
sizes: {
handler () {
this.updateVisibleItems(false)
},
deep: true,
},
},
created () {
this.$_startIndex = 0
this.$_endIndex = 0
this.$_views = new Map()
this.$_unusedViews = new Map()
this.$_scrollDirty = false
this.$_lastUpdateScrollPosition = 0
// In SSR mode, we also prerender the same number of items for the first render
// to avoid a mismatch between server and client templates
if (this.prerender) {
this.$_prerender = true
this.updateVisibleItems(false)
}
},
mounted () {
this.applyPageMode()
this.$nextTick(() => {
// In SSR mode, render the real number of visible items
this.$_prerender = false
this.updateVisibleItems(true)
this.ready = true
})
},
beforeDestroy () {
this.removeListeners()
},
methods: {
addView (pool, index, item, key, type) {
const view = {
item,
position: 0,
}
const nonReactive = {
id: uid++,
index,
used: true,
key,
type,
}
Object.defineProperty(view, 'nr', {
configurable: false,
value: nonReactive,
})
pool.push(view)
return view
},
unuseView (view, fake = false) {
const unusedViews = this.$_unusedViews
const type = view.nr.type
let unusedPool = unusedViews.get(type)
if (!unusedPool) {
unusedPool = []
unusedViews.set(type, unusedPool)
}
unusedPool.push(view)
if (!fake) {
view.nr.used = false
view.position = -9999
this.$_views.delete(view.nr.key)
}
},
handleResize () {
this.$emit('resize')
if (this.ready) this.updateVisibleItems(false)
},
handleScroll (event) {
if (!this.$_scrollDirty) {
this.$_scrollDirty = true
requestAnimationFrame(() => {
this.$_scrollDirty = false
const { continuous } = this.updateVisibleItems(false, true)
// It seems Chrome sometimes doesn't fire the scroll event :/
// When non-continuous scrolling is ending, we force a refresh
if (!continuous) {
clearTimeout(this.$_refreshTimout)
this.$_refreshTimout = setTimeout(this.handleScroll, 100)
}
})
}
},
handleVisibilityChange (isVisible, entry) {
if (this.ready) {
if (isVisible || entry.boundingClientRect.width !== 0 || entry.boundingClientRect.height !== 0) {
this.$emit('visible')
requestAnimationFrame(() => {
this.updateVisibleItems(false)
})
} else {
this.$emit('hidden')
}
}
},
updateVisibleItems (checkItem, checkPositionDiff = false) {
const itemSize = this.itemSize
const minItemSize = this.$_computedMinItemSize
const typeField = this.typeField
const keyField = this.simpleArray ? null : this.keyField
const items = this.items
const count = items.length
const sizes = this.sizes
const views = this.$_views
const unusedViews = this.$_unusedViews
const pool = this.pool
let startIndex, endIndex
let totalSize
if (!count) {
startIndex = endIndex = totalSize = 0
} else if (this.$_prerender) {
startIndex = 0
endIndex = this.prerender
totalSize = null
} else {
const scroll = this.getScroll()
// Skip the update if the user hasn't scrolled enough
if (checkPositionDiff) {
let positionDiff = scroll.start - this.$_lastUpdateScrollPosition
if (positionDiff < 0) positionDiff = -positionDiff
if ((itemSize === null && positionDiff < minItemSize) || positionDiff < itemSize) {
return {
continuous: true,
}
}
}
this.$_lastUpdateScrollPosition = scroll.start
const buffer = this.buffer
scroll.start -= buffer
scroll.end += buffer
// Variable size mode
if (itemSize === null) {
let h
let a = 0
let b = count - 1
let i = ~~(count / 2)
let oldI
// Searching for startIndex
do {
oldI = i
h = sizes[i].accumulator
if (h < scroll.start) {
a = i
} else if (i < count - 1 && sizes[i + 1].accumulator > scroll.start) {
b = i
}
i = ~~((a + b) / 2)
} while (i !== oldI)
i < 0 && (i = 0)
startIndex = i
// For container style
totalSize = sizes[count - 1].accumulator
// Searching for endIndex
for (endIndex = i; endIndex < count && sizes[endIndex].accumulator < scroll.end; endIndex++);
if (endIndex === -1) {
endIndex = items.length - 1
} else {
endIndex++
// Bounds
endIndex > count && (endIndex = count)
}
} else {
// Fixed size mode
startIndex = ~~(scroll.start / itemSize)
endIndex = Math.ceil(scroll.end / itemSize)
// Bounds
startIndex < 0 && (startIndex = 0)
endIndex > count && (endIndex = count)
totalSize = count * itemSize
}
}
if (endIndex - startIndex > config.itemsLimit) {
this.itemsLimitError()
}
this.totalSize = totalSize
let view
const continuous = startIndex <= this.$_endIndex && endIndex >= this.$_startIndex
if (this.$_continuous !== continuous) {
if (continuous) {
views.clear()
unusedViews.clear()
for (let i = 0, l = pool.length; i < l; i++) {
view = pool[i]
this.unuseView(view)
}
}
this.$_continuous = continuous
} else if (continuous) {
for (let i = 0, l = pool.length; i < l; i++) {
view = pool[i]
if (view.nr.used) {
// Update view item index
if (checkItem) {
view.nr.index = items.findIndex(
item => keyField ? item[keyField] === view.item[keyField] : item === view.item,
)
}
// Check if index is still in visible range
if (
view.nr.index === -1 ||
view.nr.index < startIndex ||
view.nr.index >= endIndex
) {
this.unuseView(view)
}
}
}
}
const unusedIndex = continuous ? null : new Map()
let item, type, unusedPool
let v
for (let i = startIndex; i < endIndex; i++) {
item = items[i]
const key = keyField ? item[keyField] : item
if (key == null) {
throw new Error(`Key is ${key} on item (keyField is '${keyField}')`)
}
view = views.get(key)
if (!itemSize && !sizes[i].size) {
if (view) this.unuseView(view)
continue
}
// No view assigned to item
if (!view) {
type = item[typeField]
unusedPool = unusedViews.get(type)
if (continuous) {
// Reuse existing view
if (unusedPool && unusedPool.length) {
view = unusedPool.pop()
view.item = item
view.nr.used = true
view.nr.index = i
view.nr.key = key
view.nr.type = type
} else {
view = this.addView(pool, i, item, key, type)
}
} else {
// Use existing view
// We don't care if they are already used
// because we are not in continuous scrolling
v = unusedIndex.get(type) || 0
if (!unusedPool || v >= unusedPool.length) {
view = this.addView(pool, i, item, key, type)
this.unuseView(view, true)
unusedPool = unusedViews.get(type)
}
view = unusedPool[v]
view.item = item
view.nr.used = true
view.nr.index = i
view.nr.key = key
view.nr.type = type
unusedIndex.set(type, v + 1)
v++
}
views.set(key, view)
} else {
view.nr.used = true
view.item = item
}
// Update position
if (itemSize === null) {
view.position = sizes[i - 1].accumulator
} else {
view.position = i * itemSize
}
}
this.$_startIndex = startIndex
this.$_endIndex = endIndex
if (this.emitUpdate) this.$emit('update', startIndex, endIndex)
// After the user has finished scrolling
// Sort views so text selection is correct
clearTimeout(this.$_sortTimer)
this.$_sortTimer = setTimeout(this.sortViews, 300)
return {
continuous,
}
},
getListenerTarget () {
let target = ScrollParent(this.$el)
// Fix global scroll target for Chrome and Safari
if (window.document && (target === window.document.documentElement || target === window.document.body)) {
target = window
}
return target
},
getScroll () {
const { $el: el, direction } = this
const isVertical = direction === 'vertical'
let scrollState
if (this.pageMode) {
const bounds = el.getBoundingClientRect()
const boundsSize = isVertical ? bounds.height : bounds.width
let start = -(isVertical ? bounds.top : bounds.left)
let size = isVertical ? window.innerHeight : window.innerWidth
if (start < 0) {
size += start
start = 0
}
if (start + size > boundsSize) {
size = boundsSize - start
}
scrollState = {
start,
end: start + size,
}
} else if (isVertical) {
scrollState = {
start: el.scrollTop,
end: el.scrollTop + el.clientHeight,
}
} else {
scrollState = {
start: el.scrollLeft,
end: el.scrollLeft + el.clientWidth,
}
}
return scrollState
},
applyPageMode () {
if (this.pageMode) {
this.addListeners()
} else {
this.removeListeners()
}
},
addListeners () {
this.listenerTarget = this.getListenerTarget()
this.listenerTarget.addEventListener('scroll', this.handleScroll, supportsPassive ? {
passive: true,
} : false)
this.listenerTarget.addEventListener('resize', this.handleResize)
},
removeListeners () {
if (!this.listenerTarget) {
return
}
this.listenerTarget.removeEventListener('scroll', this.handleScroll)
this.listenerTarget.removeEventListener('resize', this.handleResize)
this.listenerTarget = null
},
scrollToItem (index) {
let scroll
if (this.itemSize === null) {
scroll = index > 0 ? this.sizes[index - 1].accumulator : 0
} else {
scroll = index * this.itemSize
}
this.scrollToPosition(scroll)
},
scrollToPosition (position) {
if (this.direction === 'vertical') {
this.$el.scrollTop = position
} else {
this.$el.scrollLeft = position
}
},
itemsLimitError () {
setTimeout(() => {
console.log('It seems the scroller element isn\'t scrolling, so it tries to render all the items at once.', 'Scroller:', this.$el)
console.log('Make sure the scroller has a fixed height (or width) and \'overflow-y\' (or \'overflow-x\') set to \'auto\' so it can scroll correctly and only render the items visible in the scroll viewport.')
})
throw new Error('Rendered items limit reached')
},
sortViews () {
this.pool.sort((viewA, viewB) => viewA.nr.index - viewB.nr.index)
},
},
}
</script>
<style>
.vue-recycle-scroller {
position: relative;
}
.vue-recycle-scroller.direction-vertical:not(.page-mode) {
overflow-y: auto;
}
.vue-recycle-scroller.direction-horizontal:not(.page-mode) {
overflow-x: auto;
}
.vue-recycle-scroller.direction-horizontal {
display: flex;
}
.vue-recycle-scroller__slot {
flex: auto 0 0;
}
.vue-recycle-scroller__item-wrapper {
flex: 1;
box-sizing: border-box;
overflow: hidden;
position: relative;
}
.vue-recycle-scroller.ready .vue-recycle-scroller__item-view {
position: absolute;
top: 0;
left: 0;
will-change: transform;
}
.vue-recycle-scroller.direction-vertical .vue-recycle-scroller__item-wrapper {
width: 100%;
}
.vue-recycle-scroller.direction-horizontal .vue-recycle-scroller__item-wrapper {
height: 100%;
}
.vue-recycle-scroller.ready.direction-vertical .vue-recycle-scroller__item-view {
width: 100%;
}
.vue-recycle-scroller.ready.direction-horizontal .vue-recycle-scroller__item-view {
height: 100%;
}
</style>
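
For orientation, a minimal usage sketch of the fixed-size mode of the RecycleScroller defined above; the import path, the list contents, and the 32px row size are illustrative assumptions, not part of this change.

<template>
  <!-- The scroller needs an explicit height when not in page-mode; its own CSS supplies overflow-y. -->
  <recycle-scroller :items="rows" :item-size="32" key-field="id" style="height: 400px">
    <template #default="{ item }">
      <div>{{ item.label }}</div>
    </template>
  </recycle-scroller>
</template>

<script>
// Hedged sketch: the module path is assumed to match where this vendored copy is exposed.
import { RecycleScroller } from 'vendor/vue-virtual-scroller'

export default {
  components: { RecycleScroller },
  data () {
    // Hypothetical rows; only the key field ('id') is required by the scroller.
    return { rows: Array.from({ length: 10000 }, (_, i) => ({ id: i, label: `Row ${i}` })) }
  },
}
</script>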

View File

@ -0,0 +1,21 @@
export const props = {
items: {
type: Array,
required: true,
},
keyField: {
type: String,
default: 'id',
},
direction: {
type: String,
default: 'vertical',
validator: (value) => ['vertical', 'horizontal'].includes(value),
},
}
export function simpleArray () {
return this.items.length && typeof this.items[0] !== 'object'
}
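
To illustrate the `simpleArray` helper just above (the values are invented): it reports whether the bound items are plain values, in which case the scroller uses the values themselves as keys instead of `keyField`.

// Illustrative only: how simpleArray evaluates for two item shapes.
const objectItems = [{ id: 1 }, { id: 2 }] // -> false, the 'id' keyField is used
const primitiveItems = ['a', 'b', 'c']     // -> true (non-empty array of non-object entries)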

View File

@ -0,0 +1,3 @@
export default {
itemsLimit: 1000,
}

View File

@ -0,0 +1,60 @@
/**
* See https://gitlab.com/gitlab-org/gitlab/-/issues/331267 for more information on this vendored
* dependency
*/
import config from './config'
import RecycleScroller from './components/RecycleScroller.vue'
import DynamicScroller from './components/DynamicScroller.vue'
import DynamicScrollerItem from './components/DynamicScrollerItem.vue'
export { default as IdState } from './mixins/IdState'
export {
RecycleScroller,
DynamicScroller,
DynamicScrollerItem,
}
function registerComponents (Vue, prefix) {
Vue.component(`${prefix}recycle-scroller`, RecycleScroller)
Vue.component(`${prefix}RecycleScroller`, RecycleScroller)
Vue.component(`${prefix}dynamic-scroller`, DynamicScroller)
Vue.component(`${prefix}DynamicScroller`, DynamicScroller)
Vue.component(`${prefix}dynamic-scroller-item`, DynamicScrollerItem)
Vue.component(`${prefix}DynamicScrollerItem`, DynamicScrollerItem)
}
const plugin = {
// eslint-disable-next-line no-undef
install (Vue, options) {
const finalOptions = Object.assign({}, {
installComponents: true,
componentsPrefix: '',
}, options)
for (const key in finalOptions) {
if (typeof finalOptions[key] !== 'undefined') {
config[key] = finalOptions[key]
}
}
if (finalOptions.installComponents) {
registerComponents(Vue, finalOptions.componentsPrefix)
}
},
}
export default plugin
// Auto-install
let GlobalVue = null
if (typeof window !== 'undefined') {
GlobalVue = window.Vue
} else if (typeof global !== 'undefined') {
GlobalVue = global.Vue
}
if (GlobalVue) {
GlobalVue.use(plugin)
}
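
A hedged sketch of the explicit plugin path above: every extra option key is copied into the shared config, so limits such as itemsLimit can be tuned at install time. The prefix and limit values below are invented for the example.

// Illustrative only: global registration plus a config override.
import Vue from 'vue'
import VirtualScroller from 'vendor/vue-virtual-scroller'

Vue.use(VirtualScroller, {
  installComponents: true,        // register recycle-scroller, dynamic-scroller, ... globally
  componentsPrefix: 'vendored-',  // hypothetical prefix -> <vendored-recycle-scroller>, etc.
  itemsLimit: 2000,               // copied into config, overriding the default of 1000
})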

View File

@ -0,0 +1,79 @@
import Vue from 'vue'
export default function ({
idProp = vm => vm.item.id,
} = {}) {
const store = {}
const vm = new Vue({
data () {
return {
store,
}
},
})
// @vue/component
return {
data () {
return {
idState: null,
}
},
created () {
this.$_id = null
if (typeof idProp === 'function') {
this.$_getId = () => idProp.call(this, this)
} else {
this.$_getId = () => this[idProp]
}
this.$watch(this.$_getId, {
handler (value) {
this.$nextTick(() => {
this.$_id = value
})
},
immediate: true,
})
this.$_updateIdState()
},
beforeUpdate () {
this.$_updateIdState()
},
methods: {
/**
* Initialize an idState
* @param {number|string} id Unique id for the data
*/
$_idStateInit (id) {
const factory = this.$options.idState
if (typeof factory === 'function') {
const data = factory.call(this, this)
vm.$set(store, id, data)
this.$_id = id
return data
} else {
throw new Error('[mixin IdState] Missing `idState` function on component definition.')
}
},
/**
* Ensure idState is created and up-to-date
*/
$_updateIdState () {
const id = this.$_getId()
if (id == null) {
console.warn(`No id found for IdState with idProp: '${idProp}'.`)
}
if (id !== this.$_id) {
if (!store[id]) {
this.$_idStateInit(id)
}
this.idState = store[id]
}
},
},
}
}
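
A sketch of how the IdState mixin above is typically consumed; the component shape, the `expanded` flag, and the prop names are invented for illustration. The `idState()` option replaces local data for state that must survive view recycling, keyed by the item's id.

// Illustrative only: per-item state that survives recycled views.
import { IdState } from 'vendor/vue-virtual-scroller'

export default {
  mixins: [
    // idProp defaults to vm => vm.item.id; written out here for clarity.
    IdState({ idProp: vm => vm.item.id }),
  ],
  props: {
    item: { type: Object, required: true },
  },
  // Called once per item id; the returned object is reused whenever this
  // recycled component is shown again for the same id.
  idState () {
    return { expanded: false }
  },
  methods: {
    toggle () {
      this.idState.expanded = !this.idState.expanded
    },
  },
}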

View File

@ -0,0 +1,13 @@
export let supportsPassive = false
if (typeof window !== 'undefined') {
supportsPassive = false
try {
var opts = Object.defineProperty({}, 'passive', {
get () {
supportsPassive = true
},
})
window.addEventListener('test', null, opts)
} catch (e) {}
}

View File

@ -784,10 +784,10 @@
core-js-pure "^3.0.0"
regenerator-runtime "^0.13.4"
"@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
version "7.11.2"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.11.2.tgz#f549c13c754cc40b87644b9fa9f09a6a95fe0736"
integrity sha512-TeWkU52so0mPtDcaCTxNBI/IHiz0pZgr8VEFqXFtZWpYD08ZB6FaSwVAS8MKRQAP3bYKiVjwysOJgMFY28o6Tw==
"@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.13.10", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
version "7.14.0"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.14.0.tgz#46794bc20b612c5f75e62dd071e24dfd95f1cbe6"
integrity sha512-JELkvo/DlpNdJ7dlyw/eY7E0suy5i5GQH+Vlxaq1nsNJ+H7f4Vtv3jMeCEgRhZZQFXTjldYfQgv2qmM6M1v5wA==
dependencies:
regenerator-runtime "^0.13.4"
@ -10359,6 +10359,11 @@ schema-utils@^3.0.0:
ajv "^6.12.5"
ajv-keywords "^3.5.2"
scrollparent@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/scrollparent/-/scrollparent-2.0.1.tgz#715d5b9cc57760fb22bdccc3befb5bfe06b1a317"
integrity sha1-cV1bnMV3YPsivczDvvtb/gaxoxc=
select-hose@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca"
@ -12126,6 +12131,18 @@ vue-loader@^15.9.6:
vue-hot-reload-api "^2.3.0"
vue-style-loader "^4.1.0"
vue-observe-visibility@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/vue-observe-visibility/-/vue-observe-visibility-1.0.0.tgz#17cf1b2caf74022f0f3c95371468ddf2b9573152"
integrity sha512-s5TFh3s3h3Mhd3jaz3zGzkVHKHnc/0C/gNr30olO99+yw2hl3WBhK3ng3/f9OF+qkW4+l7GkmwfAzDAcY3lCFg==
vue-resize@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/vue-resize/-/vue-resize-1.0.1.tgz#c120bed4e09938771d622614f57dbcf58a5147ee"
integrity sha512-z5M7lJs0QluJnaoMFTIeGx6dIkYxOwHThlZDeQnWZBizKblb99GSejPnK37ZbNE/rVwDcYcHY+Io+AxdpY952w==
dependencies:
"@babel/runtime" "^7.13.10"
vue-router@3.4.9:
version "3.4.9"
resolved "https://registry.yarnpkg.com/vue-router/-/vue-router-3.4.9.tgz#c016f42030ae2932f14e4748b39a1d9a0e250e66"