Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-10-24 12:11:34 +00:00
parent fda0e422a7
commit d5f67e75b6
35 changed files with 340 additions and 59 deletions


@@ -168,11 +168,6 @@ export default {
},
methods: {
toggleCollapsed() {
// Because the top-level div is always clickable, we need to check if we can collapse.
if (!this.isCollapsible) {
return;
}
if (this.trackAction) {
api.trackRedisHllUserEvent(this.trackAction);
}
@@ -187,7 +182,7 @@ export default {
</script>
<template>
<section class="media-section">
<div class="media" :class="{ 'gl-cursor-pointer': isCollapsible }" @click="toggleCollapsed">
<div class="media">
<status-icon :status="statusIconName" :size="24" class="align-self-center" />
<div class="media-body gl-display-flex gl-align-items-flex-start gl-flex-direction-row!">
<div
@@ -218,7 +213,7 @@ export default {
category="tertiary"
size="small"
:icon="isExpanded ? 'chevron-lg-up' : 'chevron-lg-down'"
@click.stop="toggleCollapsed"
@click="toggleCollapsed"
/>
</div>
</div>


@@ -7,20 +7,26 @@ module ProtectedBranches
CACHE_EXPIRE_IN = 1.day
CACHE_LIMIT = 1000
def fetch(ref_name, dry_run: false)
def fetch(ref_name, dry_run: false, &block)
record = OpenSSL::Digest::SHA256.hexdigest(ref_name)
Gitlab::Redis::Cache.with do |redis|
cached_result = redis.hget(redis_key, record)
decoded_result = Gitlab::Redis::Boolean.decode(cached_result) unless cached_result.nil?
if cached_result.nil?
metrics.increment_cache_miss
else
metrics.increment_cache_hit
decoded_result = Gitlab::Redis::Boolean.decode(cached_result)
end
# If we're dry-running, don't break because we need to check against
# the real value to ensure the cache is working properly.
# If the result is nil we'll need to run the block, so don't break yet.
break decoded_result unless dry_run || decoded_result.nil?
calculated_value = yield
calculated_value = metrics.observe_cache_generation(&block)
check_and_log_discrepancy(decoded_result, calculated_value, ref_name) if dry_run
@@ -64,5 +70,14 @@ module ProtectedBranches
def redis_key
@redis_key ||= [CACHE_ROOT_KEY, @project.id].join(':')
end
def metrics
@metrics ||= Gitlab::Cache::Metrics.new(
caller_id: Gitlab::ApplicationContext.current_context_attribute(:caller_id),
cache_identifier: "#{self.class}#fetch",
feature_category: :source_code_management,
backing_resource: :cpu
)
end
end
end
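
For orientation, a minimal sketch of how a caller is expected to drive this cache, assuming a simplified setup (the constructor arguments and the block body are placeholders, not taken from this commit; the call pattern follows the spec further down):

```ruby
# Illustrative only: `project` and `expensive_protected_branch_check` are placeholders.
service = ProtectedBranches::CacheService.new(project)

# First call: no entry in the Redis hash for the hashed ref name, so the miss
# counter is incremented and the block runs inside metrics.observe_cache_generation,
# which also records how long the value took to compute.
service.fetch('main') { expensive_protected_branch_check('main') }

# Later calls: the boolean is decoded from the cached value, the hit counter is
# incremented, and the block is skipped.
service.fetch('main') { expensive_protected_branch_check('main') }

# With dry_run: true the block always runs so the cached value can be compared
# against the freshly calculated one via check_and_log_discrepancy.
service.fetch('main', dry_run: true) { expensive_protected_branch_check('main') }
```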


@@ -148,6 +148,8 @@ The following metrics are available:
| `gitlab_ci_build_trace_errors_total` | Counter | 14.4 | Total amount of different error types on a build trace | `error_reason` |
| `gitlab_presentable_object_cacheless_render_real_duration_seconds` | Histogram | 15.3 | Duration of real time spent caching and representing specific web request objects | `controller`, `action` |
| `cached_object_operations_total` | Counter | 15.3 | Total number of objects cached for specific web requests | `controller`, `action` |
| `redis_hit_miss_operations_total` | Counter | 15.6 | Total number of Redis cache hits and misses | `cache_hit`, `caller_id`, `cache_identifier`, `feature_category`, `backing_resource` |
| `redis_cache_generation_duration_seconds` | Histogram | 15.6 | Time to generate Redis cache | `cache_hit`, `caller_id`, `cache_identifier`, `feature_category`, `backing_resource` |
## Metrics controlled by a feature flag


@@ -4,7 +4,7 @@ group: Product Analytics
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Product analytics API
# Product analytics API **(ULTIMATE)**
> Introduced in GitLab 15.4 [with a flag](../administration/feature_flags.md) named `cube_api_proxy`. Disabled by default.


@@ -257,7 +257,7 @@ To filter:
1. On the top bar, select **Main menu > Groups** and find your group.
1. On the left sidebar, select **Epics**.
1. Select the field **Search or filter results**.
1. From the dropdown menu, select the scope or enter plain text to search by epic title or description.
1. From the dropdown list, select the scope or enter plain text to search by epic title or description.
1. Press <kbd>Enter</kbd> on your keyboard. The list is filtered.
## Sort the list of epics
@@ -282,10 +282,10 @@ You can reverse the default order and interact with the activity feed sorted by
at the top. Your preference is saved via local storage and automatically applied to every epic and issue
you view.
To change the activity sort order, select the **Oldest first** dropdown menu and select either oldest
To change the activity sort order, select the **Oldest first** dropdown list and select either oldest
or newest items to be shown first.
![Issue activity sort order dropdown button](img/epic_activity_sort_order_v13_2.png)
![Issue activity sort order dropdown list](img/epic_activity_sort_order_v13_2.png)
## Make an epic confidential


@@ -278,8 +278,8 @@ To group issues by label:
1. On the top bar, select **Main menu > Groups** and find your group.
1. On the left sidebar, select **Issues > Iterations**.
1. In the **Group by** dropdown, select **Label**.
1. Select the **Filter by label** dropdown.
1. Select the labels you want to group by in the labels dropdown.
1. In the **Group by** dropdown list, select **Label**.
1. Select the **Filter by label** dropdown list.
1. Select the labels you want to group by in the labels dropdown list.
You can also search for labels by typing in the search input.
1. Select any area outside the label dropdown list. The page is now grouped by the selected labels.


@@ -58,10 +58,10 @@ Epic "1"*-- "0..*" Issue
In an issue, you can view the parented epic above the issue in the right sidebar under **Epic**.
![epics state dropdown](img/issue-view-parent-epic-in-sidebar_v14_6.png)
![epics state dropdown list](img/issue-view-parent-epic-in-sidebar_v14_6.png)
## View ancestry of an epic
In an epic, you can view the ancestors as parents in the right sidebar under **Ancestors**.
![epics state dropdown](img/epic-view-ancestors-in-sidebar_v14_6.png)
![epics state dropdown list](img/epic-view-ancestors-in-sidebar_v14_6.png)


@@ -58,7 +58,7 @@ To get the report:
1. Select the projects and date range you want to include in the report.
1. Select **Download test coverage data (.csv)**.
The projects dropdown shows up to 100 projects from your group. If the project you want to check is not in the dropdown list, you can select **All projects** to download the report for all projects in your group, including any projects that are not listed. There is a plan to improve this behavior in this [related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/250684).
The projects dropdown list shows up to 100 projects from your group. If the project you want to check is not in the dropdown list, you can select **All projects** to download the report for all projects in your group, including any projects that are not listed. There is a plan to improve this behavior in this [related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/250684).
For each day that a coverage report was generated by a job in a project's pipeline, a row in the CSV includes:


@@ -91,7 +91,7 @@ Refer to the [Civo Terraform provider](https://registry.terraform.io/providers/c
After configuring your project, manually trigger the provisioning of your cluster. In GitLab:
1. On the left sidebar, go to **CI/CD > Pipelines**.
1. Next to **Play** (**{play}**), select the dropdown icon (**{chevron-lg-down}**).
1. Next to **Play** (**{play}**), select the dropdown list icon (**{chevron-lg-down}**).
1. Select **Deploy** to manually trigger the deployment job.
When the pipeline finishes successfully, you can see your new cluster:


@@ -92,7 +92,7 @@ View the [AWS Terraform provider](https://registry.terraform.io/providers/hashic
After configuring your project, manually trigger the provisioning of your cluster. In GitLab:
1. On the left sidebar, go to **CI/CD > Pipelines**.
1. Next to **Play** (**{play}**), select the dropdown icon (**{chevron-lg-down}**).
1. Next to **Play** (**{play}**), select the dropdown list icon (**{chevron-lg-down}**).
1. Select **Deploy** to manually trigger the deployment job.
When the pipeline finishes successfully, you can view the new cluster:


@@ -117,7 +117,7 @@ Refer to the [Google Terraform provider](https://registry.terraform.io/providers
After configuring your project, manually trigger the provisioning of your cluster. In GitLab:
1. On the left sidebar, go to **CI/CD > Pipelines**.
1. Next to **Play** (**{play}**), select the dropdown icon (**{chevron-lg-down}**).
1. Next to **Play** (**{play}**), select the dropdown list icon (**{chevron-lg-down}**).
1. Select **Deploy** to manually trigger the deployment job.
When the pipeline finishes successfully, you can see your new cluster:


@@ -523,7 +523,7 @@ for more details about the permissions that this setting grants to users.
1. Go to your project's **Settings > General** page.
1. Expand the section **Visibility, project features, permissions**.
1. Under **Container Registry**, select an option from the dropdown:
1. Under **Container Registry**, select an option from the dropdown list:
- **Everyone With Access** (Default): The Container Registry is visible to everyone with access
to the project. If the project is public, the Container Registry is also public. If the project


@@ -109,7 +109,7 @@ To select a notification level for a group, use either of these methods:
Or:
1. On the top bar, select **Main menu > Groups** and find your group.
1. Select the notification dropdown, next to the bell icon (**{notifications}**).
1. Select the notification dropdown list, next to the bell icon (**{notifications}**).
1. Select the desired [notification level](#notification-levels).
#### Change email address used for group notifications
@@ -140,7 +140,7 @@ To select a notification level for a project, use either of these methods:
Or:
1. On the top bar, select **Main menu > Projects** and find your project.
1. Select the notification dropdown, next to the bell icon (**{notifications}**).
1. Select the notification dropdown list, next to the bell icon (**{notifications}**).
1. Select the desired [notification level](#notification-levels).
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>


@@ -137,7 +137,7 @@ You can include the following options for your default dashboard view:
### Group overview content
The **Group overview content** dropdown allows you to choose what information is
The **Group overview content** dropdown list allows you to choose what information is
displayed on a group's home page.
You can choose between 2 options:


@@ -69,7 +69,7 @@ To add a Kubernetes cluster to your project, group, or instance:
1. Project's **{cloud-gear}** **Infrastructure > Kubernetes clusters** page, for a project-level cluster.
1. Group's **{cloud-gear}** **Kubernetes** page, for a group-level cluster.
1. **Main menu > Admin > Kubernetes** page, for an instance-level cluster.
1. On the **Kubernetes clusters** page, select the **Connect with a certificate** option from the **Actions** dropdown menu.
1. On the **Kubernetes clusters** page, select the **Connect with a certificate** option from the **Actions** dropdown list.
1. On the **Connect a cluster** page, fill in the details:
1. **Kubernetes cluster name** (required) - The name you wish to give the cluster.
1. **Environment scope** (required) - The


@@ -28,7 +28,7 @@ notifications to Google Chat:
To enable the integration in Google Chat:
1. Enter the room where you want to receive notifications from GitLab.
1. Open the room dropdown menu on the top-left and select **Manage webhooks**.
1. Open the room dropdown list on the top-left and select **Manage webhooks**.
1. Enter the name for your webhook, for example "GitLab integration".
1. Optional. Add an avatar for your bot.
1. Select **Save**.


@@ -45,7 +45,7 @@ to control GitLab from Slack. Slash commands are configured separately.
1. Optional. In **Username**, enter the username of the Slack bot that sends
the notifications.
1. Select the **Notify only broken pipelines** checkbox to notify only on failures.
1. In the **Branches for which notifications are to be sent** dropdown, select which types of branches
1. In the **Branches for which notifications are to be sent** dropdown list, select which types of branches
to send notifications for.
1. Leave the **Labels to be notified** field blank to get all notifications, or
add labels that the issue or merge request must have to trigger a


@@ -22,7 +22,7 @@ In GitLab:
1. Select the checkboxes corresponding to the GitLab events you want to receive in Unify Circuit.
1. Paste the **Webhook URL** that you copied from the Unify Circuit configuration step.
1. Select the **Notify only broken pipelines** checkbox to notify only on failures.
1. In the **Branches for which notifications are to be sent** dropdown, select which types of branches to send notifications for.
1. In the **Branches for which notifications are to be sent** dropdown list, select which types of branches to send notifications for.
1. Select `Save changes` or optionally select **Test settings**.
Your Unify Circuit conversation now starts receiving GitLab event notifications.


@@ -35,7 +35,7 @@ The last way to set a due date is by using [quick actions](../quick_actions.md),
You can see issues with their due dates in the issues list.
Overdue issues have their icon and date colored red.
To sort issues by their due dates, select **Due date** from the dropdown menu on the right.
To sort issues by their due dates, select **Due date** from the dropdown list on the right.
Issues are then sorted from the earliest due date to the latest.
To display issues with the latest due dates at the top, select **Sort direction** (**{sort-lowest}**).


@@ -19,7 +19,7 @@ This data extraction job can take a few hours to complete (possibly up to a day)
### Generating suggestions
Once Suggested Reviewers is enabled and the data extraction is complete, new merge requests or new commits to existing merge requests will automatically trigger a Suggested Reviewers ML model inference and generate up to 5 suggested reviewers. These suggestions are contextual to the changes in the merge request. Additional commits to merge requests may change the reviewer suggestions which will automatically update in the reviewer dropdown.
Once Suggested Reviewers is enabled and the data extraction is complete, new merge requests or new commits to existing merge requests will automatically trigger a Suggested Reviewers ML model inference and generate up to 5 suggested reviewers. These suggestions are contextual to the changes in the merge request. Additional commits to merge requests may change the reviewer suggestions which will automatically update in the reviewer dropdown list.
## Progressive enhancement


@@ -39,7 +39,7 @@ Project Maintainers or Owners can enable suggested reviewers by visiting the [pr
Enabling suggested reviewers will trigger GitLab to create an ML model for your project that will be used to generate reviewers. The larger your project, the longer this can take, but usually, the model will be ready to generate suggestions within a few hours.
No action is required once the feature is enabled. Once the model is ready, recommendations will populate the Reviewer dropdown in the right-hand sidebar of a merge request with new commits.
No action is required once the feature is enabled. Once the model is ready, recommendations will populate the Reviewer dropdown list in the right-hand sidebar of a merge request with new commits.
## Review a merge request


@@ -16,12 +16,12 @@ request diffs.
## Selecting a version
By default, the latest version of changes is shown. However, you
can select an older one from version dropdown.
can select an older one from version dropdown list.
![Merge request versions dropdown](img/versions_dropdown.png)
![Merge request versions dropdown list](img/versions_dropdown.png)
Merge request versions are based on push not on commit. So, if you pushed 5
commits in a single push, it displays as a single option in the dropdown. If you
commits in a single push, it displays as a single option in the dropdown list. If you
pushed 5 times, that counts for 5 options.
You can also compare the merge request version with an older one to see what has


@@ -250,7 +250,7 @@ can use the following setup:
on the top nav.
- Select **Create Page Rule**.
- Enter the domain `www.domain.com` and select **+ Add a Setting**.
- From the dropdown menu, choose **Forwarding URL**, then select the
- From the dropdown list, choose **Forwarding URL**, then select the
status code **301 - Permanent Redirect**.
- Enter the destination URL `https://domain.com`.

lib/gitlab/cache/metrics.rb (new file, 87 lines)

@@ -0,0 +1,87 @@
# frozen_string_literal: true
# Instrumentation for cache efficiency metrics
module Gitlab
module Cache
class Metrics
DEFAULT_BUCKETS = [0, 1, 5].freeze
VALID_BACKING_RESOURCES = [:cpu, :database, :gitaly, :memory, :unknown].freeze
DEFAULT_BACKING_RESOURCE = :unknown
def initialize(
caller_id:,
cache_identifier:,
feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT,
backing_resource: DEFAULT_BACKING_RESOURCE
)
@caller_id = caller_id
@cache_identifier = cache_identifier
@feature_category = Gitlab::FeatureCategories.default.get!(feature_category)
@backing_resource = fetch_backing_resource!(backing_resource)
end
# Increase cache hit counter
#
def increment_cache_hit
counter.increment(labels.merge(cache_hit: true))
end
# Increase cache miss counter
#
def increment_cache_miss
counter.increment(labels.merge(cache_hit: false))
end
# Measure the duration of cacheable action
#
# @example
# observe_cache_generation do
# cacheable_action
# end
#
def observe_cache_generation(&block)
real_start = Gitlab::Metrics::System.monotonic_time
value = yield
histogram.observe({}, Gitlab::Metrics::System.monotonic_time - real_start)
value
end
private
attr_reader :caller_id, :cache_identifier, :feature_category, :backing_resource
def counter
@counter ||= Gitlab::Metrics.counter(:redis_hit_miss_operations_total, "Hit/miss Redis cache counter")
end
def histogram
@histogram ||= Gitlab::Metrics.histogram(
:redis_cache_generation_duration_seconds,
'Duration of Redis cache generation',
labels,
DEFAULT_BUCKETS
)
end
def labels
@labels ||= {
caller_id: caller_id,
cache_identifier: cache_identifier,
feature_category: feature_category,
backing_resource: backing_resource
}
end
def fetch_backing_resource!(resource)
return resource if VALID_BACKING_RESOURCES.include?(resource)
raise "Unknown backing resource: #{resource}" if Gitlab.dev_or_test_env?
DEFAULT_BACKING_RESOURCE
end
end
end
end
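
A short usage sketch for this class, using only the public methods defined above; the cache lookup and block body are assumed placeholders, not part of this commit:

```ruby
# Sketch of the intended wiring around a Redis-backed cache.
metrics = Gitlab::Cache::Metrics.new(
  caller_id: Gitlab::ApplicationContext.current_context_attribute(:caller_id),
  cache_identifier: 'MyService#fetch', # illustrative identifier
  feature_category: :source_code_management,
  backing_resource: :cpu
)

cached_value = read_from_cache # placeholder lookup
if cached_value.nil?
  metrics.increment_cache_miss
  # Times the block with monotonic clocks and records the duration in the
  # redis_cache_generation_duration_seconds histogram.
  fresh_value = metrics.observe_cache_generation { compute_and_write_cache } # placeholder block
else
  metrics.increment_cache_hit
end
```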


@@ -31,6 +31,14 @@ module Gitlab
category
end
def get!(feature_category)
return feature_category if valid?(feature_category)
raise "Unknown feature category: #{feature_category}" if Gitlab.dev_or_test_env?
FEATURE_CATEGORY_DEFAULT
end
def valid?(category)
categories.include?(category.to_s)
end
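
The behaviour of `get!` in a nutshell, as exercised by the spec further down (the category names are illustrative, though `:source_code_management` also appears elsewhere in this commit):

```ruby
categories = Gitlab::FeatureCategories.default

categories.get!(:source_code_management) # => :source_code_management (known category)

# Unknown categories raise in development and test so mistakes surface early,
# but fall back to FEATURE_CATEGORY_DEFAULT ('unknown') in production.
categories.get!(:not_a_real_category)
```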


@@ -36615,6 +36615,9 @@ msgstr ""
msgid "SecurityReports|More info"
msgstr ""
msgid "SecurityReports|New vulnerabilities are vulnerabilities that the security scan detects in the merge request that are different to existing vulnerabilities in the default branch."
msgstr ""
msgid "SecurityReports|No activity"
msgstr ""
@@ -36672,6 +36675,9 @@ msgstr ""
msgid "SecurityReports|Security reports help page link"
msgstr ""
msgid "SecurityReports|Security scan results"
msgstr ""
msgid "SecurityReports|Security scans have run"
msgstr ""


@@ -219,7 +219,7 @@
"cheerio": "^1.0.0-rc.9",
"commander": "^2.20.3",
"custom-jquery-matchers": "^2.1.0",
"eslint": "8.25.0",
"eslint": "8.26.0",
"eslint-import-resolver-jest": "3.0.2",
"eslint-import-resolver-webpack": "0.13.2",
"eslint-plugin-import": "^2.26.0",


@@ -1,7 +1,5 @@
# frozen_string_literal: true
require_relative '../../../lib/gitlab/json'
module RuboCop
module Cop
module Gitlab
@@ -12,6 +10,8 @@ module RuboCop
Prefer `Gitlab::Json` over calling `JSON` or `to_json` directly. See https://docs.gitlab.com/ee/development/json.html
EOL
AVAILABLE_METHODS = %i[parse parse! load decode dump generate encode pretty_generate].to_set.freeze
def_node_matcher :json_node?, <<~PATTERN
(send (const {nil? | (const nil? :ActiveSupport)} :JSON) $_ $...)
PATTERN
@@ -37,12 +37,12 @@ module RuboCop
method_name, arg_nodes = json_node?(node)
# Only match if the method is implemented by Gitlab::Json
if method_name && ::Gitlab::Json.methods(false).include?(method_name)
if method_name && AVAILABLE_METHODS.include?(method_name)
return [method_name, arg_nodes.map(&:source).join(', ')]
end
receiver = to_json_call?(node)
return [:generate, receiver.source] if receiver
return [:dump, receiver.source] if receiver
nil
end
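
To make the cop change concrete, a bad/good style sketch (not runnable outside GitLab; the autocorrection to `Gitlab::Json.dump` matches the spec change further down):

```ruby
# bad: direct JSON / to_json calls are flagged by RuboCop::Cop::Gitlab::Json
JSON.parse('{"foo":"bar"}')
{ foo: "bar" }.to_json

# good: the autocorrected form
Gitlab::Json.parse('{"foo":"bar"}')
Gitlab::Json.dump({ foo: "bar" })
```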

spec/lib/gitlab/cache/metrics_spec.rb (new file, 118 lines)

@@ -0,0 +1,118 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Cache::Metrics do
subject(:metrics) do
described_class.new(
caller_id: caller_id,
cache_identifier: cache_identifier,
feature_category: feature_category,
backing_resource: backing_resource
)
end
let(:caller_id) { 'caller-id' }
let(:cache_identifier) { 'ApplicationController#show' }
let(:feature_category) { :source_code_management }
let(:backing_resource) { :unknown }
let(:counter_mock) { instance_double(Prometheus::Client::Counter) }
before do
allow(Gitlab::Metrics).to receive(:counter)
.with(
:redis_hit_miss_operations_total,
'Hit/miss Redis cache counter'
).and_return(counter_mock)
end
describe '#initialize' do
context 'when backing resource is not supported' do
let(:backing_resource) { 'foo' }
it { expect { metrics }.to raise_error(RuntimeError) }
context 'when on production' do
before do
allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
end
it 'does not raise an exception' do
expect { metrics }.not_to raise_error
end
end
end
end
describe '#increment_cache_hit' do
subject { metrics.increment_cache_hit }
it 'increments number of hits' do
expect(counter_mock)
.to receive(:increment)
.with(
{
caller_id: caller_id,
cache_identifier: cache_identifier,
feature_category: feature_category,
backing_resource: backing_resource,
cache_hit: true
}
).once
subject
end
end
describe '#increment_cache_miss' do
subject { metrics.increment_cache_miss }
it 'increments number of misses' do
expect(counter_mock)
.to receive(:increment)
.with(
{
caller_id: caller_id,
cache_identifier: cache_identifier,
feature_category: feature_category,
backing_resource: backing_resource,
cache_hit: false
}
).once
subject
end
end
describe '#observe_cache_generation' do
subject do
metrics.observe_cache_generation { action }
end
let(:action) { 'action' }
let(:histogram_mock) { instance_double(Prometheus::Client::Histogram) }
before do
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100.0, 500.0)
end
it 'updates histogram metric' do
expect(Gitlab::Metrics).to receive(:histogram).with(
:redis_cache_generation_duration_seconds,
'Duration of Redis cache generation',
{
caller_id: caller_id,
cache_identifier: cache_identifier,
feature_category: feature_category,
backing_resource: backing_resource
},
[0, 1, 5]
).and_return(histogram_mock)
expect(histogram_mock).to receive(:observe).with({}, 400.0)
is_expected.to eq(action)
end
end
end


@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::FeatureCategories do
let(:fake_categories) { %w(foo bar) }
subject { described_class.new(fake_categories) }
subject(:feature_categories) { described_class.new(fake_categories) }
describe "#valid?" do
it "returns true if category is known", :aggregate_failures do
@@ -14,6 +14,28 @@ RSpec.describe Gitlab::FeatureCategories do
end
end
describe '#get!' do
subject { feature_categories.get!(category) }
let(:category) { 'foo' }
it { is_expected.to eq('foo') }
context 'when category does not exist' do
let(:category) { 'zzz' }
it { expect { subject }.to raise_error(RuntimeError) }
context 'when on production' do
before do
allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
end
it { is_expected.to eq('unknown') }
end
end
end
describe "#from_request" do
let(:request_env) { {} }
let(:verified) { true }


@@ -60,7 +60,7 @@ RSpec.describe RuboCop::Cop::Gitlab::Json do
expect_correction(<<~RUBY)
class Foo
def bar
Gitlab::Json.generate({ foo: "bar" })
Gitlab::Json.dump({ foo: "bar" })
end
end
RUBY


@@ -111,5 +111,16 @@ RSpec.describe ProtectedBranches::CacheService, :clean_gitlab_redis_cache do
expect(service.fetch('not-found') { true }).to eq(true)
end
end
describe 'metrics' do
it 'records hit ratio metrics' do
expect_next_instance_of(Gitlab::Cache::Metrics) do |metrics|
expect(metrics).to receive(:increment_cache_miss).once
expect(metrics).to receive(:increment_cache_hit).exactly(4).times
end
5.times { service.fetch('main') { true } }
end
end
end
# rubocop:enable Style/RedundantFetchBlock


@@ -27,7 +27,7 @@ module IpynbDiff
def validate_notebook(notebook)
notebook_json = Oj::Parser.usual.parse(notebook)
return notebook_json if notebook_json.key?('cells')
return notebook_json if notebook_json&.key?('cells')
raise InvalidNotebookError
rescue EncodingError, Oj::ParseError, JSON::ParserError


@@ -102,5 +102,16 @@ describe IpynbDiff::Transformer do
end
end
end
context 'when notebook can not be parsed' do
it 'raises error' do
notebook = '{"cells":[]}'
allow(Oj::Parser.usual).to receive(:parse).and_return(nil)
expect do
IpynbDiff::Transformer.new.transform(notebook)
end.to raise_error(IpynbDiff::InvalidNotebookError)
end
end
end
end


@@ -1312,10 +1312,10 @@
resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950"
integrity sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg==
"@humanwhocodes/config-array@^0.10.5":
version "0.10.5"
resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.10.5.tgz#bb679745224745fff1e9a41961c1d45a49f81c04"
integrity sha512-XVVDtp+dVvRxMoxSiSfasYaG02VEe1qH5cKgMQJWhol6HwzbcqoCMJi8dAGoYAO57jhUyhI6cWuRiTcRaDaYug==
"@humanwhocodes/config-array@^0.11.6":
version "0.11.6"
resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.6.tgz#6a51d603a3aaf8d4cf45b42b3f2ac9318a4adc4b"
integrity sha512-jJr+hPTJYKyDILJfhNSHsjiwXYf26Flsz8DvNndOsHs5pwSnpGUEy8yzF0JYhCEvTDdV2vuOK5tt8BVhwO5/hg==
dependencies:
"@humanwhocodes/object-schema" "^1.2.1"
debug "^4.1.1"
@@ -1597,7 +1597,7 @@
resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b"
integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==
"@nodelib/fs.walk@^1.2.3":
"@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8":
version "1.2.8"
resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a"
integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==
@@ -5570,14 +5570,15 @@ eslint-visitor-keys@^3.3.0:
resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826"
integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==
eslint@8.25.0:
version "8.25.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.25.0.tgz#00eb962f50962165d0c4ee3327708315eaa8058b"
integrity sha512-DVlJOZ4Pn50zcKW5bYH7GQK/9MsoQG2d5eDH0ebEkE8PbgzTTmtt/VTH9GGJ4BfeZCpBLqFfvsjX35UacUL83A==
eslint@8.26.0:
version "8.26.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.26.0.tgz#2bcc8836e6c424c4ac26a5674a70d44d84f2181d"
integrity sha512-kzJkpaw1Bfwheq4VXUezFriD1GxszX6dUekM7Z3aC2o4hju+tsR/XyTC3RcoSD7jmy9VkPU3+N6YjVU2e96Oyg==
dependencies:
"@eslint/eslintrc" "^1.3.3"
"@humanwhocodes/config-array" "^0.10.5"
"@humanwhocodes/config-array" "^0.11.6"
"@humanwhocodes/module-importer" "^1.0.1"
"@nodelib/fs.walk" "^1.2.8"
ajv "^6.10.0"
chalk "^4.0.0"
cross-spawn "^7.0.2"
@@ -5593,14 +5594,14 @@ eslint@8.25.0:
fast-deep-equal "^3.1.3"
file-entry-cache "^6.0.1"
find-up "^5.0.0"
glob-parent "^6.0.1"
glob-parent "^6.0.2"
globals "^13.15.0"
globby "^11.1.0"
grapheme-splitter "^1.0.4"
ignore "^5.2.0"
import-fresh "^3.0.0"
imurmurhash "^0.1.4"
is-glob "^4.0.0"
is-path-inside "^3.0.3"
js-sdsl "^4.1.4"
js-yaml "^4.1.0"
json-stable-stringify-without-jsonify "^1.0.1"
@@ -6237,7 +6238,7 @@ glob-parent@^5.1.1, glob-parent@^5.1.2, glob-parent@~5.1.2:
dependencies:
is-glob "^4.0.1"
glob-parent@^6.0.1:
glob-parent@^6.0.2:
version "6.0.2"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3"
integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==
@@ -7117,6 +7118,11 @@ is-number@^7.0.0:
resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
is-path-inside@^3.0.3:
version "3.0.3"
resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
is-plain-obj@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"