Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-08-10 21:10:06 +00:00
parent 1c17f34a4b
commit caff5659c9
32 changed files with 1459 additions and 244 deletions

View File

@ -6,7 +6,7 @@
.nothing-here-block {
text-align: center;
padding: 20px;
padding: 16px;
color: $gl-text-color;
font-weight: $gl-font-weight-normal;
font-size: 14px;

View File

@ -10,6 +10,9 @@ class Packages::PackageFile < ApplicationRecord
belongs_to :package
# used to move the linked file within object storage
attribute :new_file_path, default: nil
has_one :conan_file_metadatum, inverse_of: :package_file, class_name: 'Packages::Conan::FileMetadatum'
has_many :package_file_build_infos, inverse_of: :package_file, class_name: 'Packages::PackageFileBuildInfo'
has_many :pipelines, through: :package_file_build_infos
@ -80,6 +83,12 @@ class Packages::PackageFile < ApplicationRecord
before_save :update_size_from_file
# if a new_file_path is provided, we need to:
# * disable the remove_previously_stored_file callback so that CarrierWave doesn't remove the old file
# * enable a new after_commit callback that will move the file in object storage
skip_callback :commit, :after, :remove_previously_stored_file, if: :execute_move_in_object_storage?
after_commit :move_in_object_storage, if: :execute_move_in_object_storage?
def download_path
Gitlab::Routing.url_helpers.download_project_package_file_path(project, self)
end
@ -89,6 +98,17 @@ class Packages::PackageFile < ApplicationRecord
def update_size_from_file
self.size ||= file.size
end
def execute_move_in_object_storage?
!file.file_storage? && new_file_path?
end
def move_in_object_storage
carrierwave_file = file.file
carrierwave_file.copy_to(new_file_path)
carrierwave_file.delete
end
end
Packages::PackageFile.prepend_mod_with('Packages::PackageFile')
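
To make the interaction between the new `new_file_path` attribute and the swapped callbacks concrete, here is a minimal Rails-console sketch. The record lookup and the target file name are hypothetical; in this commit the real caller is the `Packages::UpdatePackageFileService` introduced below.

```ruby
# Minimal sketch (Rails console, hypothetical record and file name — not part of this commit).
package_file = Packages::PackageFile.last

unless package_file.file.file_storage? # only meaningful for remotely stored (object storage) files
  # Setting new_file_path makes execute_move_in_object_storage? return true, so the
  # remove_previously_stored_file callback is skipped and move_in_object_storage runs
  # after commit, copying the CarrierWave file to the new key and deleting the old one.
  package_file.file_name     = 'renamed_file.txt'
  package_file.new_file_path = File.join(package_file.file.store_dir, package_file.file_name)
  package_file.save!
end
```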

View File

@ -13,7 +13,7 @@ class WorkItem::Type < ApplicationRecord
enum base_type: Issue.issue_types
belongs_to :group, foreign_key: :namespace_id, optional: true
belongs_to :namespace, optional: true
has_many :work_items, class_name: 'Issue', foreign_key: :work_item_type_id, inverse_of: :work_item_type
before_validation :strip_whitespace

View File

@ -6,6 +6,17 @@ module Jira
include ProjectServicesLoggable
JIRA_API_VERSION = 2
# Limit the size of the JSON error message we will attempt to parse, as the JSON is external input.
JIRA_ERROR_JSON_SIZE_LIMIT = 5_000
ERRORS = {
connection: [Errno::ECONNRESET, Errno::ECONNREFUSED],
jira_ruby: JIRA::HTTPError,
ssl: OpenSSL::SSL::SSLError,
timeout: [Timeout::Error, Errno::ETIMEDOUT],
uri: [URI::InvalidURIError, SocketError]
}.freeze
ALL_ERRORS = ERRORS.values.flatten.freeze
def initialize(jira_integration, params = {})
@project = jira_integration&.project
@ -43,15 +54,66 @@ module Jira
def request
response = client.get(url)
build_service_response(response)
rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED, URI::InvalidURIError, JIRA::HTTPError, OpenSSL::SSL::SSLError => error
error_message = "Jira request error: #{error.message}"
log_error("Error sending message", client_url: client.options[:site],
error: {
exception_class: error.class.name,
exception_message: error.message,
exception_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(error.backtrace)
})
ServiceResponse.error(message: error_message)
rescue *ALL_ERRORS => e
log_error('Error sending message',
client_url: client.options[:site],
error: {
exception_class: e.class.name,
exception_message: e.message,
exception_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(e.backtrace)
}
)
ServiceResponse.error(message: error_message(e))
end
def error_message(error)
reportable_error_message(error) ||
s_('JiraRequest|An error occurred while requesting data from Jira. Check your Jira integration configuration and try again.')
end
# Returns a user-facing error message if possible, otherwise `nil`.
def reportable_error_message(error)
case error
when ERRORS[:jira_ruby]
reportable_jira_ruby_error_message(error)
when ERRORS[:ssl]
s_('JiraRequest|An SSL error occurred while connecting to Jira: %{message}. Try your request again.') % { message: error.message }
when *ERRORS[:uri]
s_('JiraRequest|The Jira API URL for connecting to Jira is not valid. Check your Jira integration API URL and try again.')
when *ERRORS[:timeout]
s_('JiraRequest|A timeout error occurred while connecting to Jira. Try your request again.')
when *ERRORS[:connection]
s_('JiraRequest|A connection error occurred while connecting to Jira. Try your request again.')
end
end
# Returns a user-facing error message for a `JIRA::HTTPError` if possible,
# otherwise `nil`.
def reportable_jira_ruby_error_message(error)
case error.message
when 'Unauthorized'
s_('JiraRequest|The credentials for accessing Jira are not valid. Check your Jira integration credentials and try again.')
when 'Forbidden'
s_('JiraRequest|The credentials for accessing Jira are not allowed to access the data. Check your Jira integration credentials and try again.')
when 'Bad Request'
s_('JiraRequest|An error occurred while requesting data from Jira. Check your Jira integration configuration and try again.')
when /errorMessages/
jira_ruby_json_error_message(error.message)
end
end
def jira_ruby_json_error_message(error_message)
return if error_message.length > JIRA_ERROR_JSON_SIZE_LIMIT
begin
messages = Gitlab::Json.parse(error_message)['errorMessages']&.to_sentence
messages = Rails::Html::FullSanitizer.new.sanitize(messages).presence
return unless messages
s_('JiraRequest|An error occurred while requesting data from Jira: %{messages}. Check your Jira integration configuration and try again.') % { messages: messages }
rescue JSON::ParserError
end
end
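
The `reportable_error_message` case statement above mixes bare exception classes and splatted arrays as `when` patterns. A small standalone Ruby sketch of that dispatch technique (the `CONNECTION_ERRORS` constant and `describe_error` helper are illustrative, not part of this file):

```ruby
require 'openssl'

# A bare class matches via Module#===; an array of classes is splatted into the clause.
CONNECTION_ERRORS = [Errno::ECONNRESET, Errno::ECONNREFUSED].freeze

def describe_error(error)
  case error
  when OpenSSL::SSL::SSLError then 'ssl'
  when *CONNECTION_ERRORS then 'connection'
  else 'other'
  end
end

describe_error(OpenSSL::SSL::SSLError.new) # => "ssl"
describe_error(Errno::ECONNREFUSED.new)    # => "connection"
```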
def url

View File

@ -21,22 +21,11 @@ module Packages
try_obtain_lease do
@package_file.transaction do
if existing_package
package = link_to_existing_package
elsif symbol_package?
raise InvalidMetadataError, 'symbol package is invalid, matching package does not exist'
if use_new_package_file_updater?
new_execute
else
package = update_linked_package
legacy_execute
end
update_package(package)
# Updating file_name updates the path where the file is stored.
# We must pass the file again so that CarrierWave can handle the update
@package_file.update!(
file_name: package_filename,
file: @package_file.file
)
end
end
rescue ActiveRecord::RecordInvalid => e
@ -45,6 +34,52 @@ module Packages
private
def new_execute
package_to_destroy = nil
target_package = @package_file.package
if existing_package
package_to_destroy = @package_file.package
target_package = existing_package
else
if symbol_package?
raise InvalidMetadataError, 'symbol package is invalid, matching package does not exist'
end
update_linked_package
end
update_package(target_package)
::Packages::UpdatePackageFileService.new(@package_file, package_id: target_package.id, file_name: package_filename)
.execute
package_to_destroy&.destroy!
end
def legacy_execute
if existing_package
package = link_to_existing_package
elsif symbol_package?
raise InvalidMetadataError, 'symbol package is invalid, matching package does not exist'
else
package = update_linked_package
end
update_package(package)
# Updating file_name updates the path where the file is stored.
# We must pass the file again so that CarrierWave can handle the update
@package_file.update!(
file_name: package_filename,
file: @package_file.file
)
end
def use_new_package_file_updater?
::Feature.enabled?(:packages_nuget_new_package_file_updater, @package_file.project, default_enabled: :yaml)
end
def update_package(package)
return if symbol_package?

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true
module Packages
class UpdatePackageFileService
delegate :file, to: :@package_file
def initialize(package_file, params)
@package_file = package_file
@params = params
end
def execute
check_params
return if same_as_params?
# we need to access the file *before* updating the attributes linked to its path/key.
file_storage_mode = file.file_storage?
@package_file.package_id = package_id if package_id
@package_file.file_name = file_name if file_name
if file_storage_mode
# the package file is stored locally (LOCAL mode): we can pass the `file` to the update
@package_file.file = file
else
# the package file is stored remotely (REMOTE mode): don't pass the `file` to the update.
# Instead, pass the new file path, which will move the file
# in object storage.
@package_file.new_file_path = File.join(file.store_dir, @package_file.file_name)
end
@package_file.save!
end
private
def check_params
raise ArgumentError, 'package_file not persisted' unless @package_file.persisted?
raise ArgumentError, 'package_id and file_name are blank' if package_id.blank? && file_name.blank?
end
def same_as_params?
return false if package_id && package_id != @package_file.package_id
return false if file_name && file_name != @package_file.file_name
true
end
def package_id
@params[:package_id]
end
def file_name
@params[:file_name]
end
end
end
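
For context, a minimal usage sketch of this service as it is driven by the NuGet updater above; the record lookups and the file name are hypothetical.

```ruby
# Minimal usage sketch (Rails console, hypothetical records and file name).
package_file   = Packages::PackageFile.last
target_package = Packages::Package.last

Packages::UpdatePackageFileService
  .new(package_file, package_id: target_package.id, file_name: 'renamed.nupkg')
  .execute
# A locally stored file is re-assigned through CarrierWave; a remotely stored file
# is moved in object storage via PackageFile#new_file_path (see the model change above).
```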

View File

@ -51,9 +51,6 @@ Rails.application.configure do
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Enable serving of images, stylesheets, and JavaScripts from an asset server
config.action_controller.asset_host = ENV['GITLAB_CDN_HOST'] if ENV['GITLAB_CDN_HOST'].present?
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false

View File

@ -0,0 +1,8 @@
---
name: packages_nuget_new_package_file_updater
introduced_by_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/336511
milestone: '14.1'
type: development
group: group::package
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: vulnerability_finding_tracking_signatures
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54608
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/322044
milestone: '13.11'
type: development
group: group::vulnerability research
default_enabled: false

View File

@ -33,6 +33,10 @@ production: &base
host: localhost
port: 80 # Set to 443 if using HTTPS, see installation.md#using-https for additional HTTPS configuration details
https: false # Set to true if using HTTPS, see installation.md#using-https for additional HTTPS configuration details
# Uncomment this line if you want to configure the Rails asset host for a CDN.
# cdn_host: localhost
# The maximum time Puma can spend on the request. This needs to be smaller than the worker timeout.
# Default is 95% of the worker timeout
max_request_duration_seconds: 57

View File

@ -164,6 +164,7 @@ Settings.gitlab['default_branch_protection'] ||= 2
Settings.gitlab['default_can_create_group'] = true if Settings.gitlab['default_can_create_group'].nil?
Settings.gitlab['default_theme'] = Gitlab::Themes::APPLICATION_DEFAULT if Settings.gitlab['default_theme'].nil?
Settings.gitlab['host'] ||= ENV['GITLAB_HOST'] || 'localhost'
Settings.gitlab['cdn_host'] ||= ENV['GITLAB_CDN_HOST'].presence
Settings.gitlab['ssh_host'] ||= Settings.gitlab.host
Settings.gitlab['https'] = false if Settings.gitlab['https'].nil?
Settings.gitlab['port'] ||= ENV['GITLAB_PORT'] || (Settings.gitlab.https ? 443 : 80)
@ -209,7 +210,7 @@ Settings.gitlab.default_projects_features['visibility_level'] = Settings.__sen
Settings.gitlab['domain_allowlist'] ||= []
Settings.gitlab['import_sources'] ||= Gitlab::ImportSources.values
Settings.gitlab['trusted_proxies'] ||= []
Settings.gitlab['content_security_policy'] ||= Gitlab::ContentSecurityPolicy::ConfigLoader.default_settings_hash
Settings.gitlab['content_security_policy'] ||= Gitlab::ContentSecurityPolicy::ConfigLoader.default_settings_hash(Settings.gitlab['cdn_host'])
Settings.gitlab['allowed_hosts'] ||= []
Settings.gitlab['no_todos_messages'] ||= YAML.load_file(Rails.root.join('config', 'no_todos_messages.yml'))
Settings.gitlab['impersonation_enabled'] ||= true if Settings.gitlab['impersonation_enabled'].nil?

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
if Gitlab.config.gitlab.cdn_host.present?
Rails.application.configure do
config.after_initialize do
# Enable serving of images, stylesheets, and JavaScripts from an asset server
Rails.application.config.action_controller.asset_host = Gitlab.config.gitlab.cdn_host
# If ActionController::Base is called before this initializer, then we must set
# the configuration directly.
# See https://github.com/rails/rails/issues/16209
ActionController::Base.asset_host = Gitlab.config.gitlab.cdn_host
end
end
end

View File

@ -580,6 +580,54 @@ However, this behavior is undesirable for registries used by internal hosts that
1. Save the file and [restart GitLab](../restart_gitlab.md#installations-from-source) for the changes to take effect.
#### Encrypted S3 buckets
You can use server-side encryption with AWS KMS for S3 buckets that have
[SSE-S3 or SSE-KMS encryption enabled by default](https://docs.aws.amazon.com/kms/latest/developerguide/services-s3.html).
Customer master keys (CMKs) and SSE-C encryption aren't supported, because these require sending the
encryption keys in every request.
For SSE-S3, you must enable the `encrypt` option in the registry settings. How you do this depends
on how you installed GitLab. Follow the instructions here that match your installation method.
For Omnibus GitLab installations:
1. Edit `/etc/gitlab/gitlab.rb`:
```ruby
registry['storage'] = {
's3' => {
'accesskey' => 's3-access-key',
'secretkey' => 's3-secret-key-for-access-key',
'bucket' => 'your-s3-bucket',
'region' => 'your-s3-region',
'regionendpoint' => 'your-s3-regionendpoint',
'encrypt' => true
}
}
```
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure)
for the changes to take effect.
For installations from source:
1. Edit your registry configuration YML file:
```yaml
storage:
s3:
accesskey: 'AKIAKIAKI'
secretkey: 'secret123'
bucket: 'gitlab-registry-bucket-AKIAKIAKI'
region: 'your-s3-region'
regionendpoint: 'your-s3-regionendpoint'
encrypt: true
```
1. Save the file and [restart GitLab](../restart_gitlab.md#installations-from-source)
for the changes to take effect.
### Storage limitations
Currently, there is no storage limitation, which means a user can upload an

View File

@ -4,23 +4,24 @@ group: Runner
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Build Cloud runners for macOS (beta)
# Build Cloud runners for macOS (Beta)
Build Cloud for macOS Beta provides on-demand GitLab Runners integrated with GitLab SaaS [CI/CD](../../../ci/index.md)
to build, test, and deploy apps for the Apple ecosystem (macOS, iOS, tvOS). You can take advantage
of all the capabilities of the GitLab single DevOps platform and not have to manage or operate a
build environment.
Build Cloud runners for macOS are in [beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#beta)
Build Cloud runners for macOS are in [Beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#beta)
and shouldn't be relied upon for mission-critical production jobs.
## Quickstart
To start using Build Cloud for macOS beta, you must submit an access request issue. After your
To start using Build Cloud for macOS Beta, you must submit an access request issue. After your
access has been granted and your build environment configured, you must configure your
`.gitlab-ci.yml` pipeline file:
1. Add a `.gitlab-ci.yml` file to your project repository.
1. Specify the [image](macos/environment.md#vm-images) you want to use.
1. Commit a change to your repository.
The runners automatically run your build.

View File

@ -254,6 +254,21 @@ Lowercase. If you use **MR** as the acronym, spell it out on first use.
Lowercase.
## need to, should
Try to avoid. If something is required, use **must**.
- Avoid: You need to set the variable.
- Use instead: You must set the variable. Or: Set the variable.
**Should** is acceptable for recommended actions or items, or in cases where an event may not
happen. For example:
- Although you can configure the installation manually, you should use the express configuration to
avoid complications.
- You should see a success message in the console. Contact support if an error message appears
instead.
## note that
Do not use.

View File

@ -25,7 +25,18 @@ you can create an escalation policy:
![Escalation Policy](img/escalation_policy_v14_1.png)
### Edit an escalation policy
### Select the responder of an escalation rule
When configuring an escalation rule, you can designate who to page:
- **Email on-call user in schedule:** notifies the users who are on-call when the rule is triggered,
covering all rotations on the specified [on-call schedule](oncall_schedules.md).
- **Email user:** notifies the specified user directly.
When a notification is sent to a user through an on-call schedule or directly, a system note listing
the paged users is created on the alert.
## Edit an escalation policy
Follow these steps to update an escalation policy:
@ -34,7 +45,7 @@ Follow these steps to update an escalation policy:
1. In the **Edit policy** form, edit the information you wish to update.
1. Select the **Edit policy** button to save your changes.
### Delete an escalation policy
## Delete an escalation policy
Follow these steps to delete a policy:

View File

@ -0,0 +1,166 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Reports
module Security
class VulnerabilityReportsComparer
include Gitlab::Utils::StrongMemoize
attr_reader :base_report, :head_report
ACCEPTABLE_REPORT_AGE = 1.week
def initialize(project, base_report, head_report)
@base_report = base_report
@head_report = head_report
@signatures_enabled = (
::Feature.enabled?(:vulnerability_finding_tracking_signatures, project) &&
project.licensed_feature_available?(:vulnerability_finding_signatures)
)
if @signatures_enabled
@added_findings = []
@fixed_findings = []
calculate_changes
end
end
def base_report_created_at
@base_report.created_at
end
def head_report_created_at
@head_report.created_at
end
def base_report_out_of_date
return false unless @base_report.created_at
ACCEPTABLE_REPORT_AGE.ago > @base_report.created_at
end
def added
strong_memoize(:added) do
if @signatures_enabled
@added_findings
else
head_report.findings - base_report.findings
end
end
end
def fixed
strong_memoize(:fixed) do
if @signatures_enabled
@fixed_findings
else
base_report.findings - head_report.findings
end
end
end
private
def calculate_changes
# This is a deconstructed version of the eql? method on
# Ci::Reports::Security::Finding. It:
#
# * precomputes for the head_findings (using FindingMatcher):
# * sets of signature shas grouped by priority
# * mappings of signature shas to the head finding object
#
# These are then used when iterating the base findings to perform
# fast(er) prioritized, signature-based comparisons between each base finding
# and the head findings.
#
# Both the head_findings and base_findings arrays are iterated once
base_findings = base_report.findings
head_findings = head_report.findings
matcher = FindingMatcher.new(head_findings)
base_findings.each do |base_finding|
matched_head_finding = matcher.find_and_remove_match!(base_finding)
@fixed_findings << base_finding if matched_head_finding.nil?
end
@added_findings = matcher.unmatched_head_findings.values
end
end
class FindingMatcher
attr_reader :unmatched_head_findings, :head_findings
include Gitlab::Utils::StrongMemoize
def initialize(head_findings)
@head_findings = head_findings
@unmatched_head_findings = @head_findings.index_by(&:object_id)
end
def find_and_remove_match!(base_finding)
matched_head_finding = find_matched_head_finding_for(base_finding)
# no signatures matched, so check the normal uuids of the base and head findings
# for a match
matched_head_finding = head_signatures_shas[base_finding.uuid] if matched_head_finding.nil?
@unmatched_head_findings.delete(matched_head_finding.object_id) unless matched_head_finding.nil?
matched_head_finding
end
private
def find_matched_head_finding_for(base_finding)
base_signature = sorted_signatures_for(base_finding).find do |signature|
# at this point a head_finding exists that has a signature with a
# matching priority, and a matching sha --> lookup the actual finding
# object from head_signatures_shas
head_signatures_shas[signature.signature_sha].eql?(base_finding)
end
base_signature.present? ? head_signatures_shas[base_signature.signature_sha] : nil
end
def sorted_signatures_for(base_finding)
base_finding.signatures.select { |signature| head_finding_signature?(signature) }
.sort_by { |sig| -sig.priority }
end
def head_finding_signature?(signature)
head_signatures_priorities[signature.priority].include?(signature.signature_sha)
end
def head_signatures_priorities
strong_memoize(:head_signatures_priorities) do
signatures_priorities = Hash.new { |hash, key| hash[key] = Set.new }
head_findings.each_with_object(signatures_priorities) do |head_finding, memo|
head_finding.signatures.each do |signature|
memo[signature.priority].add(signature.signature_sha)
end
end
end
end
def head_signatures_shas
strong_memoize(:head_signatures_shas) do
head_findings.each_with_object({}) do |head_finding, memo|
head_finding.signatures.each do |signature|
memo[signature.signature_sha] = head_finding
end
# for the final uuid check when no signatures have matched
memo[head_finding.uuid] = head_finding
end
end
end
end
end
end
end
end
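
A brief usage sketch of the comparer; the `project`, `base_report`, and `head_report` objects below are placeholders for the aggregated security reports the caller would already hold.

```ruby
# Illustrative sketch (hypothetical setup, run within the application).
comparer = Gitlab::Ci::Reports::Security::VulnerabilityReportsComparer.new(
  project,     # used to check the vulnerability_finding_tracking_signatures flag and license
  base_report, # e.g. the aggregated report from the target branch pipeline
  head_report  # e.g. the aggregated report from the head pipeline
)

comparer.added                    # findings present in head but not in base
comparer.fixed                    # findings present in base but not in head
comparer.base_report_out_of_date  # true if the base report is older than ACCEPTABLE_REPORT_AGE (1 week)
```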

View File

@ -7,7 +7,7 @@ module Gitlab
form_action frame_ancestors frame_src img_src manifest_src
media_src object_src report_uri script_src style_src worker_src).freeze
def self.default_settings_hash
def self.default_settings_hash(cdn_host)
settings_hash = {
'enabled' => Rails.env.development? || Rails.env.test?,
'report_only' => false,
@ -36,7 +36,7 @@ module Gitlab
settings_hash['directives']['child_src'] = settings_hash['directives']['frame_src']
allow_webpack_dev_server(settings_hash) if Rails.env.development?
allow_cdn(settings_hash) if ENV['GITLAB_CDN_HOST'].present?
allow_cdn(settings_hash, cdn_host) if cdn_host.present?
allow_customersdot(settings_hash) if Rails.env.development? && ENV['CUSTOMER_PORTAL_URL'].present?
settings_hash
@ -75,9 +75,7 @@ module Gitlab
append_to_directive(settings_hash, 'connect_src', "#{http_url} #{ws_url}")
end
def self.allow_cdn(settings_hash)
cdn_host = ENV['GITLAB_CDN_HOST']
def self.allow_cdn(settings_hash, cdn_host)
append_to_directive(settings_hash, 'script_src', cdn_host)
append_to_directive(settings_hash, 'style_src', cdn_host)
append_to_directive(settings_hash, 'font_src', cdn_host)

View File

@ -3756,9 +3756,6 @@ msgstr ""
msgid "An error occurred while reordering issues."
msgstr ""
msgid "An error occurred while requesting data from the Jira service."
msgstr ""
msgid "An error occurred while retrieving calendar activity"
msgstr ""
@ -4191,6 +4188,12 @@ msgstr ""
msgid "ApprovalRule|Name"
msgstr ""
msgid "ApprovalRule|Number of vulnerabilities allowed before approval rule is triggered."
msgstr ""
msgid "ApprovalRule|Please enter a number equal or greater than zero"
msgstr ""
msgid "ApprovalRule|Please select at least one security scanner"
msgstr ""
@ -4209,6 +4212,9 @@ msgstr ""
msgid "ApprovalRule|Target branch"
msgstr ""
msgid "ApprovalRule|Vulnerabilities allowed"
msgstr ""
msgid "ApprovalSettings|Merge request approval settings have been updated."
msgstr ""
@ -9199,6 +9205,9 @@ msgstr ""
msgid "CorpusManagement|Target"
msgstr ""
msgid "CorpusManagement|To use this corpus, edit the corresponding YAML file"
msgstr ""
msgid "CorpusManagement|Total Size: %{totalSize}"
msgstr ""
@ -18824,6 +18833,30 @@ msgstr ""
msgid "JiraConnect|You can now close this window and return to Jira."
msgstr ""
msgid "JiraRequest|A connection error occurred while connecting to Jira. Try your request again."
msgstr ""
msgid "JiraRequest|A timeout error occurred while connecting to Jira. Try your request again."
msgstr ""
msgid "JiraRequest|An SSL error occurred while connecting to Jira: %{message}. Try your request again."
msgstr ""
msgid "JiraRequest|An error occurred while requesting data from Jira. Check your Jira integration configuration and try again."
msgstr ""
msgid "JiraRequest|An error occurred while requesting data from Jira: %{messages}. Check your Jira integration configuration and try again."
msgstr ""
msgid "JiraRequest|The Jira API URL for connecting to Jira is not valid. Check your Jira integration API URL and try again."
msgstr ""
msgid "JiraRequest|The credentials for accessing Jira are not allowed to access the data. Check your Jira integration credentials and try again."
msgstr ""
msgid "JiraRequest|The credentials for accessing Jira are not valid. Check your Jira integration credentials and try again."
msgstr ""
msgid "JiraService| on branch %{branch_link}"
msgstr ""

View File

@ -2,10 +2,14 @@
FactoryBot.define do
factory :work_item_type, class: 'WorkItem::Type' do
group
namespace
name { generate(:work_item_type_name) }
icon_name { 'issue' }
base_type { Issue.issue_types['issue'] }
trait :default do
namespace { nil }
end
end
end

View File

@ -86,11 +86,11 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
context 'when Jira connection is not valid' do
before do
WebMock.stub_request(:get, 'https://jira.example.com/rest/api/2/project')
.to_raise(JIRA::HTTPError.new(double(message: 'Some failure.')))
.to_raise(JIRA::HTTPError.new(double(message: '{"errorMessages":["Some failure"]}')))
end
it 'raises failure error' do
expect { resolve_jira_projects }.to raise_error('Jira request error: Some failure.')
expect { resolve_jira_projects }.to raise_error('An error occurred while requesting data from Jira: Some failure. Check your Jira integration configuration and try again.')
end
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Rails asset host initializer' do
def load_initializer
load Rails.root.join('config/initializers/rails_asset_host.rb')
end
subject { Rails.application.config.action_controller.asset_host }
it 'uses no asset host by default' do
load_initializer
expect(subject).to be nil
end
context 'with cdn_host defined in gitlab.yml' do
before do
stub_config_setting(cdn_host: 'https://gitlab.example.com')
end
it 'returns https://gitlab.example.com' do
load_initializer
expect(subject).to eq('https://gitlab.example.com')
end
end
end

View File

@ -0,0 +1,164 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Ci::Reports::Security::VulnerabilityReportsComparer do
let(:identifier) { build(:ci_reports_security_identifier) }
let_it_be(:project) { create(:project, :repository) }
let(:location_param) { build(:ci_reports_security_locations_sast, :dynamic) }
let(:vulnerability_params) { vuln_params(project.id, [identifier], confidence: :low, severity: :critical) }
let(:base_vulnerability) { build(:ci_reports_security_finding, location: location_param, **vulnerability_params) }
let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability]) }
let(:head_vulnerability) { build(:ci_reports_security_finding, location: location_param, uuid: base_vulnerability.uuid, **vulnerability_params) }
let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability]) }
shared_context 'comparing reports' do
let(:vul_params) { vuln_params(project.id, [identifier]) }
let(:base_vulnerability) { build(:ci_reports_security_finding, :dynamic, **vul_params) }
let(:head_vulnerability) { build(:ci_reports_security_finding, :dynamic, **vul_params) }
let(:head_vul_findings) { [head_vulnerability, vuln] }
end
subject { described_class.new(project, base_report, head_report) }
where(vulnerability_finding_tracking_signatures_enabled: [true, false])
with_them do
before do
stub_feature_flags(vulnerability_finding_tracking_signatures: vulnerability_finding_tracking_signatures_enabled)
stub_licensed_features(vulnerability_finding_signatures: vulnerability_finding_tracking_signatures_enabled)
end
describe '#base_report_out_of_date' do
context 'no base report' do
let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [], findings: []) }
it 'is not out of date' do
expect(subject.base_report_out_of_date).to be false
end
end
context 'base report older than one week' do
let(:report) { build(:ci_reports_security_report, created_at: 1.week.ago - 60.seconds) }
let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [report]) }
it 'is out of date' do
expect(subject.base_report_out_of_date).to be true
end
end
context 'base report less than one week old' do
let(:report) { build(:ci_reports_security_report, created_at: 1.week.ago + 60.seconds) }
let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [report]) }
it 'is not out of date' do
expect(subject.base_report_out_of_date).to be false
end
end
end
describe '#added' do
let(:new_location) { build(:ci_reports_security_locations_sast, :dynamic) }
let(:vul_params) { vuln_params(project.id, [identifier], confidence: :high) }
let(:vuln) { build(:ci_reports_security_finding, severity: Enums::Vulnerability.severity_levels[:critical], location: new_location, **vul_params) }
let(:low_vuln) { build(:ci_reports_security_finding, severity: Enums::Vulnerability.severity_levels[:low], location: new_location, **vul_params) }
context 'with new vulnerability' do
let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability, vuln]) }
it 'points to source tree' do
expect(subject.added).to eq([vuln])
end
end
context 'when comparing reports with different fingerprints' do
include_context 'comparing reports'
let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: head_vul_findings) }
it 'does not find any overlap' do
expect(subject.added).to eq(head_vul_findings)
end
end
context 'order' do
let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability, vuln, low_vuln]) }
it 'does not change' do
expect(subject.added).to eq([vuln, low_vuln])
end
end
end
describe '#fixed' do
let(:vul_params) { vuln_params(project.id, [identifier]) }
let(:vuln) { build(:ci_reports_security_finding, :dynamic, **vul_params ) }
let(:medium_vuln) { build(:ci_reports_security_finding, confidence: ::Enums::Vulnerability.confidence_levels[:high], severity: Enums::Vulnerability.severity_levels[:medium], uuid: vuln.uuid, **vul_params) }
context 'with fixed vulnerability' do
let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability, vuln]) }
it 'points to base tree' do
expect(subject.fixed).to eq([vuln])
end
end
context 'when comparing reports with different fingerprints' do
include_context 'comparing reports'
let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability, vuln]) }
it 'does not find any overlap' do
expect(subject.fixed).to eq([base_vulnerability, vuln])
end
end
context 'order' do
let(:vul_findings) { [vuln, medium_vuln] }
let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [*vul_findings, base_vulnerability]) }
it 'does not change' do
expect(subject.fixed).to eq(vul_findings)
end
end
end
describe 'with empty vulnerabilities' do
let(:empty_report) { build(:ci_reports_security_aggregated_reports, reports: [], findings: []) }
it 'returns empty array when reports are not present' do
comparer = described_class.new(project, empty_report, empty_report)
expect(comparer.fixed).to eq([])
expect(comparer.added).to eq([])
end
it 'returns added vulnerability when base is empty and head is not empty' do
comparer = described_class.new(project, empty_report, head_report)
expect(comparer.fixed).to eq([])
expect(comparer.added).to eq([head_vulnerability])
end
it 'returns fixed vulnerability when head is empty and base is not empty' do
comparer = described_class.new(project, base_report, empty_report)
expect(comparer.fixed).to eq([base_vulnerability])
expect(comparer.added).to eq([])
end
end
end
def vuln_params(project_id, identifiers, confidence: :high, severity: :critical)
{
project_id: project_id,
report_type: :sast,
identifiers: identifiers,
confidence: ::Enums::Vulnerability.confidence_levels[confidence],
severity: ::Enums::Vulnerability.severity_levels[severity]
}
end
end

View File

@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
let(:policy) { ActionDispatch::ContentSecurityPolicy.new }
let(:cdn_host) { nil }
let(:csp_config) do
{
enabled: true,
@ -20,7 +21,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
describe '.default_settings_hash' do
let(:settings) { described_class.default_settings_hash }
let(:settings) { described_class.default_settings_hash(cdn_host) }
it 'returns defaults for all keys' do
expect(settings['enabled']).to be_truthy
@ -48,12 +49,10 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
end
context 'when GITLAB_CDN_HOST is set' do
before do
stub_env('GITLAB_CDN_HOST', 'https://example.com')
end
context 'when CDN host is defined' do
let(:cdn_host) { 'https://example.com' }
it 'adds GITLAB_CDN_HOST to CSP' do
it 'adds CDN host to CSP' do
directives = settings['directives']
expect(directives['script_src']).to eq("'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.google.com/recaptcha/ https://www.recaptcha.net https://apis.google.com https://example.com")

View File

@ -159,4 +159,71 @@ RSpec.describe Packages::PackageFile, type: :model do
expect { subject }.to change { package_file.size }.from(nil).to(3513)
end
end
context 'update callbacks' do
subject { package_file.save! }
shared_examples 'executing the default callback' do
it 'executes the default callback' do
expect(package_file).to receive(:remove_previously_stored_file)
expect(package_file).not_to receive(:move_in_object_storage)
subject
end
end
context 'with object storage disabled' do
let(:package_file) { create(:package_file, file_name: 'file_name.txt') }
before do
stub_package_file_object_storage(enabled: false)
end
it_behaves_like 'executing the default callback'
context 'with new_file_path set' do
before do
package_file.new_file_path = 'test'
end
it_behaves_like 'executing the default callback'
end
end
context 'with object storage enabled' do
let(:package_file) do
create(
:package_file,
file_name: 'file_name.txt',
file: CarrierWaveStringFile.new_file(
file_content: 'content',
filename: 'file_name.txt',
content_type: 'text/plain'
),
file_store: ::Packages::PackageFileUploader::Store::REMOTE
)
end
before do
stub_package_file_object_storage(enabled: true)
end
it_behaves_like 'executing the default callback'
context 'with new_file_path set' do
before do
package_file.new_file_path = 'test'
end
it 'executes the move_in_object_storage callback' do
expect(package_file).not_to receive(:remove_previously_stored_file)
expect(package_file).to receive(:move_in_object_storage).and_call_original
expect(package_file.file.file).to receive(:copy_to).and_call_original
expect(package_file.file.file).to receive(:delete).and_call_original
subject
end
end
end
end
end

View File

@ -9,7 +9,7 @@ RSpec.describe WorkItem::Type do
describe 'associations' do
it { is_expected.to have_many(:work_items).with_foreign_key('work_item_type_id') }
it { is_expected.to belong_to(:group).with_foreign_key('namespace_id') }
it { is_expected.to belong_to(:namespace) }
end
describe '#destroy' do

View File

@ -43,20 +43,7 @@ RSpec.describe Jira::Requests::Projects::ListService do
stub_request(:get, expected_url_pattern).to_return(status: 200, body: response_body, headers: response_headers)
end
context 'when the request to Jira returns an error' do
before do
expect_next(JIRA::Client).to receive(:get).and_raise(Timeout::Error)
end
it 'returns an error response' do
expect(Gitlab::ProjectServiceLogger).to receive(:error).with(
hash_including(
error: hash_including(:exception_class, :exception_message, :exception_backtrace)))
.and_call_original
expect(subject.error?).to be_truthy
expect(subject.message).to eq('Jira request error: Timeout::Error')
end
end
it_behaves_like 'a service that handles Jira API errors'
context 'when jira runs on a subpath' do
let(:jira_integration) { create(:jira_integration, url: 'http://jira.example.com/jira') }

View File

@ -49,7 +49,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
allow(service).to receive(:metadata).and_return(metadata)
end
it 'does not update the package' do
it 'does not update the package', :aggregate_failures do
expect(service).to receive(:try_obtain_lease).and_call_original
expect { subject }
@ -63,152 +63,28 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
end
end
context 'with no existing package' do
let(:package_id) { package.id }
it 'updates package and package file' do
expect { subject }
.to change { ::Packages::Package.count }.by(1)
.and change { Packages::Dependency.count }.by(1)
.and change { Packages::DependencyLink.count }.by(1)
.and change { ::Packages::Nuget::Metadatum.count }.by(0)
expect(package.reload.name).to eq(package_name)
expect(package.version).to eq(package_version)
expect(package).to be_default
expect(package_file.reload.file_name).to eq(package_file_name)
# hard reset needed to properly reload package_file.file
expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
end
it_behaves_like 'taking the lease'
it_behaves_like 'not updating the package if the lease is taken'
end
context 'with existing package' do
let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
let(:package_id) { existing_package.id }
it 'link existing package and updates package file' do
expect(service).to receive(:try_obtain_lease).and_call_original
expect { subject }
.to change { ::Packages::Package.count }.by(-1)
.and change { Packages::Dependency.count }.by(0)
.and change { Packages::DependencyLink.count }.by(0)
.and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
.and change { ::Packages::Nuget::Metadatum.count }.by(0)
expect(package_file.reload.file_name).to eq(package_file_name)
expect(package_file.package).to eq(existing_package)
end
it_behaves_like 'taking the lease'
it_behaves_like 'not updating the package if the lease is taken'
end
context 'with a nuspec file with metadata' do
let(:nuspec_filepath) { 'packages/nuget/with_metadata.nuspec' }
let(:expected_tags) { %w(foo bar test tag1 tag2 tag3 tag4 tag5) }
before do
allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
allow(service)
.to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
end
end
it 'creates tags' do
expect(service).to receive(:try_obtain_lease).and_call_original
expect { subject }.to change { ::Packages::Tag.count }.by(8)
expect(package.reload.tags.map(&:name)).to contain_exactly(*expected_tags)
end
context 'with existing package and tags' do
let!(:existing_package) { create(:nuget_package, project: package.project, name: 'DummyProject.WithMetadata', version: '1.2.3') }
let!(:tag1) { create(:packages_tag, package: existing_package, name: 'tag1') }
let!(:tag2) { create(:packages_tag, package: existing_package, name: 'tag2') }
let!(:tag3) { create(:packages_tag, package: existing_package, name: 'tag_not_in_metadata') }
it 'creates tags and deletes those not in metadata' do
expect(service).to receive(:try_obtain_lease).and_call_original
expect { subject }.to change { ::Packages::Tag.count }.by(5)
expect(existing_package.tags.map(&:name)).to contain_exactly(*expected_tags)
end
end
it 'creates nuget metadatum' do
expect { subject }
.to change { ::Packages::Package.count }.by(1)
.and change { ::Packages::Nuget::Metadatum.count }.by(1)
metadatum = package_file.reload.package.nuget_metadatum
expect(metadatum.license_url).to eq('https://opensource.org/licenses/MIT')
expect(metadatum.project_url).to eq('https://gitlab.com/gitlab-org/gitlab')
expect(metadatum.icon_url).to eq('https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png')
end
context 'with too long url' do
let_it_be(:too_long_url) { "http://localhost/#{'bananas' * 50}" }
let(:metadata) { { package_name: package_name, package_version: package_version, license_url: too_long_url } }
before do
allow(service).to receive(:metadata).and_return(metadata)
end
it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
end
end
context 'with nuspec file with dependencies' do
let(:nuspec_filepath) { 'packages/nuget/with_dependencies.nuspec' }
let(:package_name) { 'Test.Package' }
let(:package_version) { '3.5.2' }
let(:package_file_name) { 'test.package.3.5.2.nupkg' }
before do
allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
allow(service)
.to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
end
end
it 'updates package and package file' do
expect { subject }
.to change { ::Packages::Package.count }.by(1)
.and change { Packages::Dependency.count }.by(4)
.and change { Packages::DependencyLink.count }.by(4)
.and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(2)
expect(package.reload.name).to eq(package_name)
expect(package.version).to eq(package_version)
expect(package).to be_default
expect(package_file.reload.file_name).to eq(package_file_name)
# hard reset needed to properly reload package_file.file
expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
end
end
context 'with package file not containing a nuspec file' do
before do
allow_next_instance_of(Zip::File) do |file|
allow(file).to receive(:glob).and_return([])
end
end
it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
end
context 'with a symbol package' do
let(:package_file) { package.package_files.last }
let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.snupkg' }
shared_examples 'handling all conditions' do
context 'with no existing package' do
let(:package_id) { package.id }
it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
it 'updates package and package file', :aggregate_failures do
expect { subject }
.to not_change { ::Packages::Package.count }
.and change { Packages::Dependency.count }.by(1)
.and change { Packages::DependencyLink.count }.by(1)
.and change { ::Packages::Nuget::Metadatum.count }.by(0)
expect(package.reload.name).to eq(package_name)
expect(package.version).to eq(package_version)
expect(package).to be_default
expect(package_file.reload.file_name).to eq(package_file_name)
# hard reset needed to properly reload package_file.file
expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
end
it_behaves_like 'taking the lease'
it_behaves_like 'not updating the package if the lease is taken'
end
context 'with existing package' do
@ -217,8 +93,6 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
it 'link existing package and updates package file', :aggregate_failures do
expect(service).to receive(:try_obtain_lease).and_call_original
expect(::Packages::Nuget::SyncMetadatumService).not_to receive(:new)
expect(::Packages::UpdateTagsService).not_to receive(:new)
expect { subject }
.to change { ::Packages::Package.count }.by(-1)
@ -234,42 +108,189 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
it_behaves_like 'not updating the package if the lease is taken'
end
end
context 'with an invalid package name' do
invalid_names = [
'',
'My/package',
'../../../my_package',
'%2e%2e%2fmy_package'
]
context 'with a nuspec file with metadata' do
let(:nuspec_filepath) { 'packages/nuget/with_metadata.nuspec' }
let(:expected_tags) { %w(foo bar test tag1 tag2 tag3 tag4 tag5) }
invalid_names.each do |invalid_name|
before do
allow(service).to receive(:package_name).and_return(invalid_name)
allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
allow(service)
.to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
end
end
it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
it 'creates tags' do
expect(service).to receive(:try_obtain_lease).and_call_original
expect { subject }.to change { ::Packages::Tag.count }.by(8)
expect(package.reload.tags.map(&:name)).to contain_exactly(*expected_tags)
end
context 'with existing package and tags' do
let!(:existing_package) { create(:nuget_package, project: package.project, name: 'DummyProject.WithMetadata', version: '1.2.3') }
let!(:tag1) { create(:packages_tag, package: existing_package, name: 'tag1') }
let!(:tag2) { create(:packages_tag, package: existing_package, name: 'tag2') }
let!(:tag3) { create(:packages_tag, package: existing_package, name: 'tag_not_in_metadata') }
it 'creates tags and deletes those not in metadata' do
expect(service).to receive(:try_obtain_lease).and_call_original
expect { subject }.to change { ::Packages::Tag.count }.by(5)
expect(existing_package.tags.map(&:name)).to contain_exactly(*expected_tags)
end
end
it 'creates nuget metadatum', :aggregate_failures do
expect { subject }
.to not_change { ::Packages::Package.count }
.and change { ::Packages::Nuget::Metadatum.count }.by(1)
metadatum = package_file.reload.package.nuget_metadatum
expect(metadatum.license_url).to eq('https://opensource.org/licenses/MIT')
expect(metadatum.project_url).to eq('https://gitlab.com/gitlab-org/gitlab')
expect(metadatum.icon_url).to eq('https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png')
end
context 'with too long url' do
let_it_be(:too_long_url) { "http://localhost/#{'bananas' * 50}" }
let(:metadata) { { package_name: package_name, package_version: package_version, license_url: too_long_url } }
before do
allow(service).to receive(:metadata).and_return(metadata)
end
it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
end
end
context 'with nuspec file with dependencies' do
let(:nuspec_filepath) { 'packages/nuget/with_dependencies.nuspec' }
let(:package_name) { 'Test.Package' }
let(:package_version) { '3.5.2' }
let(:package_file_name) { 'test.package.3.5.2.nupkg' }
before do
allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
allow(service)
.to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
end
end
it 'updates package and package file', :aggregate_failures do
expect { subject }
.to not_change { ::Packages::Package.count }
.and change { Packages::Dependency.count }.by(4)
.and change { Packages::DependencyLink.count }.by(4)
.and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(2)
expect(package.reload.name).to eq(package_name)
expect(package.version).to eq(package_version)
expect(package).to be_default
expect(package_file.reload.file_name).to eq(package_file_name)
# hard reset needed to properly reload package_file.file
expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
end
end
context 'with package file not containing a nuspec file' do
before do
allow_next_instance_of(Zip::File) do |file|
allow(file).to receive(:glob).and_return([])
end
end
it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
end
context 'with a symbol package' do
let(:package_file) { package.package_files.last }
let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.snupkg' }
context 'with no existing package' do
let(:package_id) { package.id }
it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
end
context 'with existing package' do
let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
let(:package_id) { existing_package.id }
it 'link existing package and updates package file', :aggregate_failures do
expect(service).to receive(:try_obtain_lease).and_call_original
expect(::Packages::Nuget::SyncMetadatumService).not_to receive(:new)
expect(::Packages::UpdateTagsService).not_to receive(:new)
expect { subject }
.to change { ::Packages::Package.count }.by(-1)
.and change { Packages::Dependency.count }.by(0)
.and change { Packages::DependencyLink.count }.by(0)
.and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
.and change { ::Packages::Nuget::Metadatum.count }.by(0)
expect(package_file.reload.file_name).to eq(package_file_name)
expect(package_file.package).to eq(existing_package)
end
it_behaves_like 'taking the lease'
it_behaves_like 'not updating the package if the lease is taken'
end
end
context 'with an invalid package name' do
invalid_names = [
'',
'My/package',
'../../../my_package',
'%2e%2e%2fmy_package'
]
invalid_names.each do |invalid_name|
before do
allow(service).to receive(:package_name).and_return(invalid_name)
end
it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
end
end
context 'with an invalid package version' do
invalid_versions = [
'',
'555',
'1.2',
'1./2.3',
'../../../../../1.2.3',
'%2e%2e%2f1.2.3'
]
invalid_versions.each do |invalid_version|
before do
allow(service).to receive(:package_version).and_return(invalid_version)
end
it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
end
end
end
context 'with an invalid package version' do
invalid_versions = [
'',
'555',
'1.2',
'1./2.3',
'../../../../../1.2.3',
'%2e%2e%2f1.2.3'
]
invalid_versions.each do |invalid_version|
before do
allow(service).to receive(:package_version).and_return(invalid_version)
end
it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
context 'with packages_nuget_new_package_file_updater enabled' do
before do
expect(service).not_to receive(:legacy_execute)
end
it_behaves_like 'handling all conditions'
end
context 'with packages_nuget_new_package_file_updater disabled' do
before do
stub_feature_flags(packages_nuget_new_package_file_updater: false)
expect(::Packages::UpdatePackageFileService)
.not_to receive(:new).with(package_file, instance_of(Hash)).and_call_original
expect(service).not_to receive(:new_execute)
end
it_behaves_like 'handling all conditions'
end
end
end

View File

@ -0,0 +1,119 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Packages::UpdatePackageFileService do
let_it_be(:another_package) { create(:package) }
let_it_be(:old_file_name) { 'old_file_name.txt' }
let_it_be(:new_file_name) { 'new_file_name.txt' }
let(:package) { package_file.package }
let(:params) { { package_id: another_package.id, file_name: new_file_name } }
let(:service) { described_class.new(package_file, params) }
describe '#execute' do
subject { service.execute }
shared_examples 'updating package file with valid parameters' do
context 'with both parameters set' do
it 'updates the package file accordingly' do
expect { subject }
.to change { package.package_files.count }.from(1).to(0)
.and change { another_package.package_files.count }.from(0).to(1)
.and change { package_file.package_id }.from(package.id).to(another_package.id)
.and change { package_file.file_name }.from(old_file_name).to(new_file_name)
end
end
context 'with only file_name set' do
let(:params) { { file_name: new_file_name } }
it 'updates the package file accordingly' do
expect { subject }
.to not_change { package.package_files.count }
.and not_change { another_package.package_files.count }
.and not_change { package_file.package_id }
.and change { package_file.file_name }.from(old_file_name).to(new_file_name)
end
end
context 'with only package_id set' do
let(:params) { { package_id: another_package.id } }
it 'updates the package file accordingly' do
expect { subject }
.to change { package.package_files.count }.from(1).to(0)
.and change { another_package.package_files.count }.from(0).to(1)
.and change { package_file.package_id }.from(package.id).to(another_package.id)
.and not_change { package_file.file_name }
end
end
end
shared_examples 'not updating package with invalid parameters' do
context 'with blank parameters' do
let(:params) { {} }
it 'raises an argument error' do
expect { subject }.to raise_error(ArgumentError, 'package_id and file_name are blank')
end
end
context 'with non persisted package file' do
let(:package_file) { build(:package_file) }
it 'raises an argument error' do
expect { subject }.to raise_error(ArgumentError, 'package_file not persisted')
end
end
end
context 'with object storage disabled' do
let(:package_file) { create(:package_file, file_name: old_file_name) }
before do
stub_package_file_object_storage(enabled: false)
end
it_behaves_like 'updating package file with valid parameters' do
before do
expect(package_file).to receive(:remove_previously_stored_file).and_call_original
expect(package_file).not_to receive(:move_in_object_storage)
end
end
it_behaves_like 'not updating package with invalid parameters'
end
context 'with object storage enabled' do
let(:package_file) do
create(
:package_file,
file_name: old_file_name,
file: CarrierWaveStringFile.new_file(
file_content: 'content',
filename: old_file_name,
content_type: 'text/plain'
),
file_store: ::Packages::PackageFileUploader::Store::REMOTE
)
end
before do
stub_package_file_object_storage(enabled: true)
end
it_behaves_like 'updating package file with valid parameters' do
before do
expect(package_file).not_to receive(:remove_previously_stored_file)
expect(package_file).to receive(:move_in_object_storage).and_call_original
end
end
it_behaves_like 'not updating package with invalid parameters' do
before do
expect(package_file.file.file).not_to receive(:copy_to)
end
end
end
end
end

View File

@ -0,0 +1,260 @@
# frozen_string_literal: true
require 'spec_helper'
# rubocop: disable RSpec/MultipleMemoizedHelpers
RSpec.describe Security::MergeReportsService, '#execute' do
let(:scanner_1) { build(:ci_reports_security_scanner, external_id: 'scanner-1', name: 'Scanner 1') }
let(:scanner_2) { build(:ci_reports_security_scanner, external_id: 'scanner-2', name: 'Scanner 2') }
let(:scanner_3) { build(:ci_reports_security_scanner, external_id: 'scanner-3', name: 'Scanner 3') }
let(:identifier_1_primary) { build(:ci_reports_security_identifier, external_id: 'VULN-1', external_type: 'scanner-1') }
let(:identifier_1_cve) { build(:ci_reports_security_identifier, external_id: 'CVE-2019-123', external_type: 'cve') }
let(:identifier_2_primary) { build(:ci_reports_security_identifier, external_id: 'VULN-2', external_type: 'scanner-2') }
let(:identifier_2_cve) { build(:ci_reports_security_identifier, external_id: 'CVE-2019-456', external_type: 'cve') }
let(:identifier_cwe) { build(:ci_reports_security_identifier, external_id: '789', external_type: 'cwe') }
let(:identifier_wasc) { build(:ci_reports_security_identifier, external_id: '13', external_type: 'wasc') }
let(:finding_id_1) do
build(:ci_reports_security_finding,
identifiers: [identifier_1_primary, identifier_1_cve],
scanner: scanner_1,
severity: :low
)
end
let(:finding_id_1_extra) do
build(:ci_reports_security_finding,
identifiers: [identifier_1_primary, identifier_1_cve],
scanner: scanner_1,
severity: :low
)
end
let(:finding_id_2_loc_1) do
build(:ci_reports_security_finding,
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
scanner: scanner_2,
severity: :medium
)
end
let(:finding_id_2_loc_1_extra) do
build(:ci_reports_security_finding,
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
scanner: scanner_2,
severity: :medium
)
end
let(:finding_id_2_loc_2) do
build(:ci_reports_security_finding,
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 42, end_line: 44),
scanner: scanner_2,
severity: :medium
)
end
let(:finding_cwe_1) do
build(:ci_reports_security_finding,
identifiers: [identifier_cwe],
scanner: scanner_3,
severity: :high
)
end
let(:finding_cwe_2) do
build(:ci_reports_security_finding,
identifiers: [identifier_cwe],
scanner: scanner_1,
severity: :critical
)
end
let(:finding_wasc_1) do
build(:ci_reports_security_finding,
identifiers: [identifier_wasc],
scanner: scanner_1,
severity: :medium
)
end
let(:finding_wasc_2) do
build(:ci_reports_security_finding,
identifiers: [identifier_wasc],
scanner: scanner_2,
severity: :critical
)
end
let(:report_1_findings) { [finding_id_1, finding_id_2_loc_1, finding_id_2_loc_1_extra, finding_cwe_2, finding_wasc_1] }
let(:scanned_resource) do
::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse('example.com'), 'GET')
end
let(:scanned_resource_1) do
::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse('example.com'), 'POST')
end
let(:scanned_resource_2) do
::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse('example.com/2'), 'GET')
end
let(:scanned_resource_3) do
::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse('example.com/3'), 'GET')
end
let(:report_1) do
build(
:ci_reports_security_report,
scanners: [scanner_1, scanner_2],
findings: report_1_findings,
identifiers: report_1_findings.flat_map(&:identifiers),
scanned_resources: [scanned_resource, scanned_resource_1, scanned_resource_2]
)
end
let(:report_2_findings) { [finding_id_2_loc_2, finding_wasc_2] }
let(:report_2) do
build(
:ci_reports_security_report,
scanners: [scanner_2],
findings: report_2_findings,
identifiers: finding_id_2_loc_2.identifiers,
scanned_resources: [scanned_resource, scanned_resource_1, scanned_resource_3]
)
end
let(:report_3_findings) { [finding_id_1_extra, finding_cwe_1] }
let(:report_3) do
build(
:ci_reports_security_report,
scanners: [scanner_1, scanner_3],
findings: report_3_findings,
identifiers: report_3_findings.flat_map(&:identifiers)
)
end
let(:merge_service) { described_class.new(report_1, report_2, report_3) }
subject(:merged_report) { merge_service.execute }
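# Errors recorded on any source report are expected to be carried over onto the merged report, in order.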
describe 'errors on target report' do
subject { merged_report.errors }
before do
report_1.add_error('foo', 'bar')
report_2.add_error('zoo', 'baz')
end
it { is_expected.to eq([{ type: 'foo', message: 'bar' }, { type: 'zoo', message: 'baz' }]) }
end
it 'copies scanners into target report and eliminates duplicates' do
expect(merged_report.scanners.values).to contain_exactly(scanner_1, scanner_2, scanner_3)
end
it 'copies identifiers into target report and eliminates duplicates' do
expect(merged_report.identifiers.values).to(
contain_exactly(
identifier_1_primary,
identifier_1_cve,
identifier_2_primary,
identifier_2_cve,
identifier_cwe,
identifier_wasc
)
)
end
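# Findings that only share a CWE or WASC identifier are kept as separate entries: those
# identifiers name broad categories rather than a specific vulnerability, so they are not
# used to treat two findings as duplicates.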
it 'deduplicates (except cwe and wasc) and sorts the vulnerabilities by severity (desc) then by compare key' do
expect(merged_report.findings).to(
eq([
finding_cwe_2,
finding_wasc_2,
finding_cwe_1,
finding_id_2_loc_2,
finding_id_2_loc_1,
finding_wasc_1,
finding_id_1
])
)
end
it 'deduplicates scanned resources' do
expect(merged_report.scanned_resources).to(
eq([
scanned_resource,
scanned_resource_1,
scanned_resource_2,
scanned_resource_3
])
)
end
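# The specs below cover bandit and semgrep findings that report the same CVE: the merge is
# expected to keep the bandit finding and to order the known SAST analyzers ahead of a
# custom one, regardless of the order in which the reports arrive.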
context 'ordering reports for sast analyzers' do
let(:bandit_scanner) { build(:ci_reports_security_scanner, external_id: 'bandit', name: 'Bandit') }
let(:semgrep_scanner) { build(:ci_reports_security_scanner, external_id: 'semgrep', name: 'Semgrep') }
let(:identifier_bandit) { build(:ci_reports_security_identifier, external_id: 'B403', external_type: 'bandit_test_id') }
let(:identifier_cve) { build(:ci_reports_security_identifier, external_id: 'CVE-2019-123', external_type: 'cve') }
let(:identifier_semgrep) { build(:ci_reports_security_identifier, external_id: 'rules.bandit.B105', external_type: 'semgrep_id') }
let(:finding_id_1) { build(:ci_reports_security_finding, identifiers: [identifier_bandit, identifier_cve], scanner: bandit_scanner, report_type: :sast) }
let(:finding_id_2) { build(:ci_reports_security_finding, identifiers: [identifier_cve], scanner: semgrep_scanner, report_type: :sast) }
let(:finding_id_3) { build(:ci_reports_security_finding, identifiers: [identifier_semgrep], scanner: semgrep_scanner, report_type: :sast) }
let(:bandit_report) do
build(
:ci_reports_security_report,
type: :sast,
scanners: [bandit_scanner],
findings: [finding_id_1],
identifiers: finding_id_1.identifiers
)
end
let(:semgrep_report) do
build(
:ci_reports_security_report,
type: :sast,
scanners: [semgrep_scanner],
findings: [finding_id_2, finding_id_3],
identifiers: finding_id_2.identifiers + finding_id_3.identifiers
)
end
let(:custom_analyzer_report) do
build(
:ci_reports_security_report,
type: :sast,
scanners: [scanner_2],
findings: [finding_id_2_loc_1],
identifiers: finding_id_2_loc_1.identifiers
)
end
context 'when reports are gathered in an unprioritized order' do
subject(:sast_merged_report) { described_class.new(semgrep_report, bandit_report).execute }
specify { expect(sast_merged_report.scanners.values).to eql([bandit_scanner, semgrep_scanner]) }
specify { expect(sast_merged_report.findings.count).to eq(2) }
specify { expect(sast_merged_report.findings.first.identifiers).to eql([identifier_bandit, identifier_cve]) }
specify { expect(sast_merged_report.findings.last.identifiers).to contain_exactly(identifier_semgrep) }
end
context 'when a custom analyzer is completed before the known analyzers' do
subject(:sast_merged_report) { described_class.new(custom_analyzer_report, semgrep_report, bandit_report).execute }
specify { expect(sast_merged_report.scanners.values).to eql([bandit_scanner, semgrep_scanner, scanner_2]) }
specify { expect(sast_merged_report.findings.count).to eq(3) }
specify { expect(sast_merged_report.findings.last.identifiers).to match_array(finding_id_2_loc_1.identifiers) }
end
end
end
# rubocop: enable RSpec/MultipleMemoizedHelpers

View File

@ -158,18 +158,18 @@ RSpec.shared_examples 'value stream analytics stage' do
it 'creates different hash record for different event configurations' do
expect do
create(factory, start_event_identifier: :issue_created, end_event_identifier: :issue_first_mentioned_in_commit)
create(factory, start_event_identifier: :issue_created, end_event_identifier: :issue_stage_end)
create(factory, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged)
end.to change { Analytics::CycleAnalytics::StageEventHash.count }.from(0).to(2)
end
context 'when the stage event hash changes' do
let(:stage) { create(factory, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) }
let(:stage) { create(factory, start_event_identifier: :issue_created, end_event_identifier: :issue_stage_end) }
it 'deletes the old, unused stage event hash record' do
old_stage_event_hash = stage.stage_event_hash
stage.update!(end_event_identifier: :merge_request_first_deployed_to_production)
stage.update!(end_event_identifier: :issue_deployed_to_production)
expect(stage.stage_event_hash_id).not_to eq(old_stage_event_hash.id)
@ -178,9 +178,9 @@ RSpec.shared_examples 'value stream analytics stage' do
end
it 'does not delete used stage event hash record' do
other_stage = create(factory, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged)
other_stage = create(factory, start_event_identifier: :issue_created, end_event_identifier: :issue_stage_end)
stage.update!(end_event_identifier: :merge_request_first_deployed_to_production)
stage.update!(end_event_identifier: :issue_deployed_to_production)
expect(stage.stage_event_hash_id).not_to eq(other_stage.stage_event_hash_id)

View File

@ -0,0 +1,85 @@
# frozen_string_literal: true
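# The including spec is expected to define `subject` as the service call that reaches the
# Jira API through `JIRA::Client#get`, so the errors stubbed below propagate through it.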
RSpec.shared_examples 'a service that handles Jira API errors' do
include AfterNextHelpers
using RSpec::Parameterized::TableSyntax
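# Each expected message is only a fragment of the full user-facing error; the example
# below therefore matches it with `include` rather than `eq`.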
where(:exception_class, :exception_message, :expected_message) do
Errno::ECONNRESET | '' | 'A connection error occurred'
Errno::ECONNREFUSED | '' | 'A connection error occurred'
Errno::ETIMEDOUT | '' | 'A timeout error occurred'
Timeout::Error | '' | 'A timeout error occurred'
URI::InvalidURIError | '' | 'The Jira API URL'
SocketError | '' | 'The Jira API URL'
OpenSSL::SSL::SSLError | 'foo' | 'An SSL error occurred while connecting to Jira: foo'
JIRA::HTTPError | 'Unauthorized' | 'The credentials for accessing Jira are not valid'
JIRA::HTTPError | 'Forbidden' | 'The credentials for accessing Jira are not allowed'
JIRA::HTTPError | 'Bad Request' | 'An error occurred while requesting data from Jira'
JIRA::HTTPError | 'Foo' | 'An error occurred while requesting data from Jira.'
JIRA::HTTPError | '{"errorMessages":["foo","bar"]}' | 'An error occurred while requesting data from Jira: foo and bar'
JIRA::HTTPError | '{"errorMessages":[""]}' | 'An error occurred while requesting data from Jira.'
end
with_them do
it 'handles the error' do
stub_client_and_raise(exception_class, exception_message)
expect(subject).to be_a(ServiceResponse)
expect(subject).to be_error
expect(subject.message).to include(expected_message)
end
end
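# The error body returned by Jira is external input, so malformed JSON or embedded markup
# must never reach the user-facing message verbatim.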
context 'when the JSON in JIRA::HTTPError is unsafe' do
before do
stub_client_and_raise(JIRA::HTTPError, error)
end
context 'when JSON is malformed' do
let(:error) { '{"errorMessages":' }
it 'returns the default error message' do
expect(subject.message).to eq('An error occurred while requesting data from Jira. Check your Jira integration configuration and try again.')
end
end
context 'when JSON contains tags' do
let(:error) { '{"errorMessages":["<script>alert(true)</script>foo"]}' }
it 'sanitizes it' do
expect(subject.message).to eq('An error occurred while requesting data from Jira: foo. Check your Jira integration configuration and try again.')
end
end
end
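# Only the error classes exercised above are rescued; anything else should surface to the caller.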
it 'allows unknown exception classes to bubble' do
stub_client_and_raise(StandardError)
expect { subject }.to raise_exception(StandardError)
end
it 'logs the error' do
stub_client_and_raise(Timeout::Error, 'foo')
expect(Gitlab::ProjectServiceLogger).to receive(:error).with(
hash_including(
client_url: be_present,
message: 'Error sending message',
service_class: described_class.name,
error: hash_including(
exception_class: Timeout::Error.name,
exception_message: 'foo',
exception_backtrace: be_present
)
)
)
expect(subject).to be_error
end
def stub_client_and_raise(exception_class, message = '')
# Unlike the other classes, `JIRA::HTTPError` is initialized with a response object from the Jira API rather than a `String` message.
message = double(body: message) if exception_class == JIRA::HTTPError
allow_next(JIRA::Client).to receive(:get).and_raise(exception_class, message)
end
end
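# Illustrative usage (the spec class and setup below are hypothetical, not part of this change):
#
#   RSpec.describe SomeJiraRequestService do
#     subject(:result) { described_class.new(jira_integration).execute }
#
#     it_behaves_like 'a service that handles Jira API errors'
#   end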