2019-07-25 01:27:42 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2017-08-16 09:04:41 -04:00
|
|
|
module GraphqlHelpers
|
2021-06-15 08:10:11 -04:00
|
|
|
# Mixed-in hook: also pulls ::Gitlab::Graphql::Laziness helpers
# (e.g. `force`/`defer`) into the including spec class.
def self.included(klass)
  klass.include(::Gitlab::Graphql::Laziness)
end
|
|
|
|
|
2018-07-10 10:19:45 -04:00
|
|
|
MutationDefinition = Struct.new(:query, :variables)
|
|
|
|
|
2019-07-29 15:25:41 -04:00
|
|
|
NoData = Class.new(StandardError)
|
2020-11-26 07:09:48 -05:00
|
|
|
UnauthorizedObject = Class.new(StandardError)
|
2019-07-29 15:25:41 -04:00
|
|
|
|
2020-12-01 07:09:17 -05:00
|
|
|
# Wrap keyword arguments in a ::Graphql::Arguments value object.
def graphql_args(**kwargs)
  ::Graphql::Arguments.new(kwargs)
end
|
|
|
|
|
2018-05-23 03:55:14 -04:00
|
|
|
# makes an underscored string look like a fieldname
|
|
|
|
# "merge_request" => "mergeRequest"
|
|
|
|
def self.fieldnamerize(underscored_field_name)
  # Coerce first so symbols and other objects are handled uniformly.
  as_string = underscored_field_name.to_s
  as_string.camelize(:lower)
end
|
|
|
|
|
2021-03-03 04:10:53 -05:00
|
|
|
# Recursively camelCase every key of a (possibly nested) hash.
def self.deep_fieldnamerize(map)
  map.to_h do |key, value|
    transformed = value.is_a?(Hash) ? deep_fieldnamerize(value) : value
    [fieldnamerize(key), transformed]
  end
end
|
|
|
|
|
2022-06-01 14:09:44 -04:00
|
|
|
# Some arguments use `as:` to expose a different name internally.
|
|
|
|
# Transform the args to use those names
|
|
|
|
def self.deep_transform_args(args, field)
  args.to_h do |name, value|
    # Look up the argument definition by its client-facing (camelCase) name.
    argument = field.arguments[name.to_s.camelize(:lower)]
    inner = value.is_a?(Hash) ? deep_transform_args(value, argument.type) : value
    [argument.keyword, inner]
  end
end
|
|
|
|
|
|
|
|
# Convert incoming args into the form usually passed in from the client,
|
|
|
|
# all strings, etc.
|
|
|
|
def self.as_graphql_argument_literals(args)
  # Each value is converted to its client-side literal form.
  args.transform_values { |v| transform_arg_value(v) }
end
|
|
|
|
|
|
|
|
# Convert a single argument value into its client-side literal form:
# hashes and arrays recurse; times/dates/GIDs/symbols become strings;
# anything else passes through unchanged.
def self.transform_arg_value(value)
  if value.is_a?(Hash)
    as_graphql_argument_literals(value)
  elsif value.is_a?(Array)
    value.map { |element| transform_arg_value(element) }
  elsif value.is_a?(Time) || value.is_a?(ActiveSupport::TimeWithZone)
    value.strftime("%F %T.%N %z")
  elsif value.is_a?(Date) || value.is_a?(GlobalID) || value.is_a?(Symbol)
    value.to_s
  else
    value
  end
end
|
|
|
|
|
2021-03-03 04:10:53 -05:00
|
|
|
# Run this resolver exactly as it would be called in the framework. This
|
|
|
|
# includes all authorization hooks, all argument processing and all result
|
|
|
|
# wrapping.
|
|
|
|
# see: GraphqlHelpers#resolve_field
|
2022-06-01 14:09:44 -04:00
|
|
|
#
|
|
|
|
# TODO: this is too coupled to gem internals, making upgrades incredibly
|
|
|
|
# painful, and bypasses much of the validation of the framework.
|
|
|
|
# See https://gitlab.com/gitlab-org/gitlab/-/issues/363121
|
2021-03-03 04:10:53 -05:00
|
|
|
def resolve(
  resolver_class, # [Class[<= BaseResolver]] The resolver at test.
  obj: nil, # [Any] The BaseObject#object for the resolver (available as `#object` in the resolver).
  args: {}, # [Hash] The arguments to the resolver (using client names).
  ctx: {}, # [#to_h] The current context values.
  schema: GitlabSchema, # [GraphQL::Schema] Schema to use during execution.
  parent: :not_given, # A GraphQL query node to be passed as the `:parent` extra.
  lookahead: :not_given, # A GraphQL lookahead object to be passed as the `:lookahead` extra.
  arg_style: :internal_prepared # Args are in internal format, but should use more rigorous processing
)
  # All resolution goes through fields, so we need to create one here that
  # uses our resolver. Thankfully, apart from the field name, resolvers
  # contain all the configuration needed to define one.
  field = ::Types::BaseField.new(
    **resolver_class.field_options.merge(owner: resolver_parent, name: 'field_value')
  )

  # All mutations accept a single `:input` argument. Wrap arguments here.
  args = { input: args } if resolver_class <= ::Mutations::BaseMutation && !args.key?(:input)

  resolve_field(field, obj,
    args: args,
    ctx: ctx,
    schema: schema,
    object_type: resolver_parent,
    extras: { parent: parent, lookahead: lookahead },
    arg_style: arg_style)
end
|
|
|
|
|
|
|
|
# Resolve the value of a field on an object.
|
|
|
|
#
|
|
|
|
# Use this method to test individual fields within type specs.
|
|
|
|
#
|
|
|
|
# e.g.
|
|
|
|
#
|
|
|
|
# issue = create(:issue)
|
|
|
|
# user = issue.author
|
|
|
|
# project = issue.project
|
2020-05-21 14:08:27 -04:00
|
|
|
#
|
2021-03-03 04:10:53 -05:00
|
|
|
# resolve_field(:author, issue, current_user: user, object_type: ::Types::IssueType)
|
|
|
|
# resolve_field(:issue, project, args: { iid: issue.iid }, current_user: user, object_type: ::Types::ProjectType)
|
2020-05-21 14:08:27 -04:00
|
|
|
#
|
2021-03-03 04:10:53 -05:00
|
|
|
# The `object_type` defaults to the `described_class`, so when called from type specs,
|
|
|
|
# the above can be written as:
|
|
|
|
#
|
|
|
|
# # In project_type_spec.rb
|
|
|
|
# resolve_field(:author, issue, current_user: user)
|
|
|
|
#
|
|
|
|
# # In issue_type_spec.rb
|
|
|
|
# resolve_field(:issue, project, args: { iid: issue.iid }, current_user: user)
|
|
|
|
#
|
|
|
|
# NB: Arguments are passed from the client's perspective. If there is an argument
|
|
|
|
# `foo` aliased as `bar`, then we would pass `args: { bar: the_value }`, and
|
|
|
|
# types are checked before resolution.
|
2022-06-01 14:09:44 -04:00
|
|
|
# rubocop:disable Metrics/ParameterLists
|
2021-03-03 04:10:53 -05:00
|
|
|
def resolve_field(
  field, # An instance of `BaseField`, or the name of a field on the current described_class
  object, # The current object of the `BaseObject` this field 'belongs' to
  args: {}, # Field arguments (keys will be fieldnamerized)
  ctx: {}, # Context values (important ones are :current_user)
  extras: {}, # Stub values for field extras (parent and lookahead)
  current_user: :not_given, # The current user (specified explicitly, overrides ctx[:current_user])
  schema: GitlabSchema, # A specific schema instance
  object_type: described_class, # The `BaseObject` type this field belongs to
  arg_style: :internal_prepared # Args are in internal format, but should use more rigorous processing
)
  field = to_base_field(field, object_type)
  ctx[:current_user] = current_user unless current_user == :not_given
  query = GraphQL::Query.new(schema, context: ctx.to_h)
  # Fields that declare the :lookahead extra get a stub negative lookahead by default.
  extras[:lookahead] = negative_lookahead if extras[:lookahead] == :not_given && field.extras.include?(:lookahead)
  query_ctx = query.context

  mock_extras(query_ctx, **extras)

  parent = object_type.authorized_new(object, query_ctx)
  raise UnauthorizedObject unless parent

  # we enable the request store so we can track gitaly calls.
  ::Gitlab::WithRequestStore.with_request_store do
    prepared_args =
      if arg_style == :internal_prepared
        args_internal_prepared(field, args: args, query_ctx: query_ctx, parent: parent, extras: extras, query: query)
      else
        args_internal(field, args: args, query_ctx: query_ctx, parent: parent, extras: extras, query: query)
      end

    # Argument preparation may produce an error object instead of args;
    # surface it to the caller rather than attempting resolution.
    if prepared_args.class <= Gitlab::Graphql::Errors::BaseError
      prepared_args
    else
      field.resolve(parent, prepared_args, query_ctx)
    end
  end
end
|
|
|
|
# rubocop:enable Metrics/ParameterLists
|
2020-05-21 14:08:27 -04:00
|
|
|
|
2022-06-01 14:09:44 -04:00
|
|
|
# Pros:
|
2022-06-19 05:08:36 -04:00
|
|
|
# - Original way we handled arguments
|
2022-06-01 14:09:44 -04:00
|
|
|
#
|
|
|
|
# Cons:
|
|
|
|
# - the `prepare` method of a type is not called. Whether as a proc or as a method
|
|
|
|
# on the type, it's not called. For example `:cluster_id` in ee/app/graphql/resolvers/vulnerabilities_resolver.rb,
|
|
|
|
# or `prepare` in app/graphql/types/range_input_type.rb, used by Types::TimeframeInputType
|
|
|
|
def args_internal(field, args:, query_ctx:, parent:, extras:, query:)
  # Transform client-style keys to the internal (`as:`-aware) keywords.
  arguments = GraphqlHelpers.deep_transform_args(args, field)
  # Idiom fix: the key block parameter is unused, so mark it with a
  # leading underscore. Returns the merged hash (`merge!` returns self).
  arguments.merge!(extras.reject { |_k, v| v == :not_given })
end
|
|
|
|
|
|
|
|
# Pros:
|
|
|
|
# - Allows the use of ruby types, without having to pass in strings
|
2022-06-19 05:08:36 -04:00
|
|
|
# - All args are converted into strings just like if it was called from a client
|
|
|
|
# - Much stronger argument verification
|
2022-06-01 14:09:44 -04:00
|
|
|
#
|
|
|
|
# Cons:
|
|
|
|
# - Some values, such as enums, would need to be changed in the specs to use the
|
|
|
|
# external values, because there is no easy way to handle them.
|
|
|
|
#
|
|
|
|
# take internal style args, and force them into client style args
|
|
|
|
def args_internal_prepared(field, args:, query_ctx:, parent:, extras:, query:)
  # Take internal-style args and force them into client-style literals.
  arguments = GraphqlHelpers.as_graphql_argument_literals(args)
  arguments.merge!(extras.reject { |k, v| v == :not_given })

  # Use public API to properly prepare the args for use by the resolver.
  # It uses `coerce_arguments` under the covers.
  prepared = nil
  client_args = GraphqlHelpers.deep_fieldnamerize(arguments)
  query.arguments_cache.dataload_for(client_args, field, parent) do |kwargs|
    prepared = kwargs
  end

  prepared.respond_to?(:keyword_arguments) ? prepared.keyword_arguments : prepared
end
|
|
|
|
|
2021-03-03 04:10:53 -05:00
|
|
|
# Stub the :lookahead and :parent extras on a query context, but only
# those that were explicitly supplied.
def mock_extras(context, parent: :not_given, lookahead: :not_given)
  unless lookahead == :not_given
    allow(context).to receive(:lookahead).and_return(lookahead)
  end

  unless parent == :not_given
    allow(context).to receive(:parent).and_return(parent)
  end
end
|
2020-11-26 07:09:48 -05:00
|
|
|
|
2021-03-03 04:10:53 -05:00
|
|
|
# a synthetic BaseObject type to be used in resolver specs. See `GraphqlHelpers#resolve`
|
|
|
|
def resolver_parent
  # Memoized; `fresh_object_type` never returns nil, so an explicit
  # nil-guard is equivalent to `||=`.
  @resolver_parent = fresh_object_type('ResolverParent') if @resolver_parent.nil?
  @resolver_parent
end
|
|
|
|
|
|
|
|
# Build a brand-new anonymous BaseObject subclass with the given GraphQL name.
def fresh_object_type(name = 'Object')
  type = Class.new(::Types::BaseObject)
  type.graphql_name(name)
  type
end
|
|
|
|
|
2021-04-23 05:10:03 -04:00
|
|
|
# Instantiate a resolver as the framework would. A plain-hash `ctx` is
# upgraded to a real GraphQL::Query::Context backed by a query double.
def resolver_instance(resolver_class, obj: nil, ctx: {}, field: nil, schema: GitlabSchema, subscription_update: false)
  if ctx.is_a?(Hash)
    query = double('Query',
      schema: schema,
      subscription_update?: subscription_update,
      warden: GraphQL::Schema::Warden::PassThruWarden)
    ctx = GraphQL::Query::Context.new(query: query, object: obj, values: ctx)
  end

  resolver_class.new(object: obj, context: ctx, field: field)
end
|
|
|
|
|
2020-01-22 19:08:53 -05:00
|
|
|
# Eagerly run a loader's named resolver
|
|
|
|
# (syncs any lazy values returned by resolve)
|
|
|
|
# Eagerly run a resolver (syncs any lazy values returned by resolve).
def eager_resolve(resolver_class, **opts)
  resolved = resolve(resolver_class, **opts)
  sync(resolved)
end
|
|
|
|
|
|
|
|
# Force a single (possibly lazy) value; non-lazy values pass through.
def sync(value)
  return GitlabSchema.sync_lazy(value) if GitlabSchema.lazy?(value)

  value
end
|
|
|
|
|
2020-12-21 10:10:05 -05:00
|
|
|
# Run a block with a current BatchLoader executor, clearing it afterwards
# so state never leaks between runs.
def with_clean_batchloader_executor(&block)
  BatchLoader::Executor.ensure_current
  block.call
ensure
  BatchLoader::Executor.clear_current
end
|
|
|
|
|
2018-02-23 10:36:40 -05:00
|
|
|
# Runs a block inside a BatchLoader::Executor wrapper
|
2017-08-16 09:04:41 -04:00
|
|
|
def batch(max_queries: nil, &blk)
  # Always run inside a clean BatchLoader executor.
  runner = -> { with_clean_batchloader_executor(&blk) }

  return runner.call unless max_queries

  # When a budget is given, assert the query count stays within it.
  result = nil
  expect { result = runner.call }.not_to exceed_query_limit(max_queries)
  result
end
|
2018-05-21 03:52:24 -04:00
|
|
|
|
2020-12-21 10:10:05 -05:00
|
|
|
# Use this when writing N+1 tests.
|
|
|
|
#
|
|
|
|
# It does not use the controller, so it avoids confounding factors due to
|
|
|
|
# authentication (token set-up, license checks)
|
|
|
|
# It clears the request store, rails cache, and BatchLoader Executor between runs.
|
|
|
|
def run_with_clean_state(query, **args)
  # Fresh request store, rails cache and batchloader executor per run.
  ::Gitlab::WithRequestStore.with_request_store do
    with_clean_rails_cache do
      with_clean_batchloader_executor { ::GitlabSchema.execute(query, **args) }
    end
  end
end
|
|
|
|
|
|
|
|
# Basically a combination of use_sql_query_cache and use_clean_rails_memory_store_caching,
|
|
|
|
# but more fine-grained, suitable for comparing two runs in the same example.
|
|
|
|
def with_clean_rails_cache(&blk)
  # Swap in a fresh memory store for the duration of the block, restoring
  # the original cache afterwards.
  original_store = Rails.cache
  Rails.cache = ActiveSupport::Cache::MemoryStore.new

  ActiveRecord::Base.cache(&blk)
ensure
  Rails.cache = original_store
end
|
|
|
|
|
2019-09-04 13:42:48 -04:00
|
|
|
# BatchLoader::GraphQL returns a wrapper, so we need to :sync in order
|
|
|
|
# to get the actual values
|
|
|
|
def batch_sync(max_queries: nil, &blk)
  batch(max_queries: max_queries) do
    sync_all(&blk)
  end
end
|
2019-09-04 13:42:48 -04:00
|
|
|
|
2020-05-21 14:08:27 -04:00
|
|
|
# Sync the block's result: a single lazy value, or each element of an array.
def sync_all(&blk)
  values = blk.call
  return sync(values) unless values.is_a?(Array)

  values.map { |value| sync(value) }
end
|
|
|
|
|
2021-03-23 14:09:05 -04:00
|
|
|
# Compose a top-level query document for the named root field, optionally
# with an operation name.
def graphql_query_for(name, args = {}, selection = nil, operation_name = nil)
  root_field = GitlabSchema.types['Query'].fields[GraphqlHelpers.fieldnamerize(name)]
  query = wrap_query(query_graphql_field(name, args, selection, root_field&.type))

  operation_name ? "query #{operation_name}#{query}" : query
end
|
|
|
|
|
|
|
|
# Wrap a selection in braces unless it already starts with one.
def wrap_query(query)
  q = query.to_s
  # Fix: use core Ruby `start_with?` instead of the ActiveSupport alias
  # `starts_with?` — identical behavior, no dependency on the alias.
  return q if q.start_with?('{')

  "{ #{q} }"
end
|
|
|
|
|
2020-05-27 20:08:37 -04:00
|
|
|
# Build a MutationDefinition (query document + variables) for the named
# mutation. Pass either a `fields` selection string or a block returning
# one — not both.
def graphql_mutation(name, input, fields = nil, &block)
  raise ArgumentError, 'Please pass either `fields` parameter or a block to `#graphql_mutation`, but not both.' if fields.present? && block

  name = name.graphql_name if name.respond_to?(:graphql_name)
  mutation_name = GraphqlHelpers.fieldnamerize(name)
  input_variable_name = "$#{input_variable_name_for_mutation(name)}"
  mutation_field = GitlabSchema.mutation.fields[mutation_name]

  fields = yield if block
  fields ||= all_graphql_fields_for(mutation_field.type.to_type_signature)

  query = <<~MUTATION
    mutation(#{input_variable_name}: #{mutation_field.arguments['input'].type.to_type_signature}) {
      #{mutation_name}(input: #{input_variable_name}) {
        #{fields}
      }
    }
  MUTATION

  MutationDefinition.new(query, variables_for_mutation(name, input))
end
|
|
|
|
|
|
|
|
# The variables hash for a mutation: { <inputVariableName> => prepared input }.
def variables_for_mutation(name, input)
  { input_variable_name_for_mutation(name) => prepare_variables(input) }
end
|
2019-04-22 12:07:19 -04:00
|
|
|
|
2020-11-26 07:09:48 -05:00
|
|
|
# Serialize query variables to a JSON string. Strings and nil pass through.
def serialize_variables(variables)
  return unless variables
  return variables if variables.is_a?(String)

  # Combine variables into a single hash.
  merged = ::Gitlab::Utils::MergeHash.merge(Array.wrap(variables).map(&:to_h))

  prepare_variables(merged).to_json
end
|
|
|
|
|
2022-05-05 11:08:47 -04:00
|
|
|
# Recursively convert any ruby object we can pass as a variable value
|
|
|
|
# to an object we can serialize with JSON, using fieldname-style keys
|
2019-07-01 00:34:34 -04:00
|
|
|
#
|
2022-05-05 11:08:47 -04:00
|
|
|
# prepare_variables({ 'my_key' => 1 })
|
|
|
|
# => { 'myKey' => 1 }
|
|
|
|
# prepare_variables({ enums: [:FOO, :BAR], user_id: global_id_of(user) })
|
|
|
|
# => { 'enums' => ['FOO', 'BAR'], 'userId' => "gid://User/123" }
|
|
|
|
# prepare_variables({ nested: { hash_values: { are_supported: true } } })
|
|
|
|
# => { 'nested' => { 'hashValues' => { 'areSupported' => true } } }
|
|
|
|
def prepare_variables(input)
  case input
  when Array
    input.map { |element| prepare_variables(element) }
  when GlobalID, Symbol
    input.to_s
  when Hash
    input.to_h do |name, value|
      [GraphqlHelpers.fieldnamerize(name), prepare_variables(value)]
    end
  else
    input
  end
end
|
|
|
|
|
2018-07-10 10:19:45 -04:00
|
|
|
# The camelCased name of the mutation's input type, used as the query
# variable name (e.g. "createNoteInput").
def input_variable_name_for_mutation(mutation_name)
  field = GitlabSchema.mutation.fields[GraphqlHelpers.fieldnamerize(mutation_name)]
  input_type_name = field.arguments['input'].type.unwrap.to_type_signature

  GraphqlHelpers.fieldnamerize(input_type_name)
end
|
|
|
|
|
2020-03-18 14:09:35 -04:00
|
|
|
# Render a field name with an optional argument list, e.g. `issues(iid: "1")`.
def field_with_params(name, attributes = {})
  namerized = GraphqlHelpers.fieldnamerize(name.to_s)
  # Fix: return the string directly instead of wrapping it in a redundant
  # interpolation (`"#{namerized}"` allocates an identical copy).
  return namerized if attributes.blank?

  field_params =
    if attributes.is_a?(Hash)
      "(#{attributes_to_graphql(attributes)})"
    else
      "(#{attributes})"
    end

  "#{namerized}#{field_params}"
end
|
|
|
|
|
2021-02-11 13:09:10 -05:00
|
|
|
# Render `name(args) { fields }`. When `fields` is nil, selects all fields
# of `type` (default: name.classify).
def query_graphql_field(name, attributes = {}, fields = nil, type = nil)
  type ||= name.to_s.classify

  # Support the two-argument call style: (name, selection).
  if fields.nil? && !attributes.is_a?(Hash)
    attributes, fields = nil, attributes
  end

  field = field_with_params(name, attributes)

  field + wrap_fields(fields || all_graphql_fields_for(type)).to_s
end
|
|
|
|
|
|
|
|
# The standard pageInfo selection used by connection queries.
def page_info_selection
  'pageInfo { hasNextPage hasPreviousPage endCursor startCursor }'
end
|
|
|
|
|
|
|
|
# Query the nodes of a connection field: `name { nodes { fields } }`,
# optionally with pagination info.
def query_nodes(name, fields = nil, args: nil, of: name, include_pagination_info: false, max_depth: 1)
  fields ||= all_graphql_fields_for(of.to_s.classify, max_depth: max_depth)
  selection = include_pagination_info ? "#{page_info_selection} nodes" : :nodes

  query_graphql_path([[name, args], selection], fields)
end
|
|
|
|
|
2021-02-01 10:08:56 -05:00
|
|
|
# An inline fragment selecting all fields of the given type.
def query_graphql_fragment(name)
  selection = all_graphql_fields_for(name)
  "... on #{name} { #{selection} }"
end
|
|
|
|
|
2020-11-26 07:09:48 -05:00
|
|
|
# e.g:
|
|
|
|
# query_graphql_path(%i[foo bar baz], all_graphql_fields_for('Baz'))
|
|
|
|
# => foo { bar { baz { x y z } } }
|
|
|
|
def query_graphql_path(segments, fields = nil)
  # Right fold: build the selection from the innermost segment outwards.
  segments.reverse_each.inject(fields) do |inner, segment|
    name, args = Array.wrap(segment)
    query_graphql_field(name, args, inner)
  end
end
|
|
|
|
|
2022-06-14 20:09:28 -04:00
|
|
|
# A lightweight stand-in for a GraphQL query object with a pass-through warden.
def query_double(schema: empty_schema)
  double('query', schema: schema, warden: GraphQL::Schema::Warden::PassThruWarden)
end
|
|
|
|
|
2019-03-03 07:46:42 -05:00
|
|
|
# Normalize a field list (symbols are camelized, strings kept as-is) and
# wrap it in braces. Returns nil when there is nothing to select.
def wrap_fields(fields)
  rendered = Array.wrap(fields).map do |field|
    field.is_a?(Symbol) ? GraphqlHelpers.fieldnamerize(field) : field
  end.join("\n")

  return unless rendered.present?

  <<~FIELDS
    {
      #{rendered}
    }
  FIELDS
end
|
|
|
|
|
2022-06-01 14:09:44 -04:00
|
|
|
def all_graphql_fields_for(class_name, max_depth: 3, excluded: [])
  # pulling _all_ fields can generate a _huge_ query (like complexity 180,000),
  # and significantly increase spec runtime. so limit the depth by default
  return if max_depth <= 0

  allow_unlimited_graphql_complexity
  allow_unlimited_graphql_depth if max_depth > 1
  allow_high_graphql_recursion
  allow_high_graphql_transaction_threshold
  allow_high_graphql_query_size

  gql_type = class_name.respond_to?(:kind) ? class_name : GitlabSchema.types[class_name.to_s]
  raise "#{class_name} is not a known type in the GitlabSchema" unless gql_type

  # We can't guess arguments, so skip fields that require them
  skip_field = ->(name, field) { excluded.include?(name) || required_arguments?(field) }

  ::Graphql::FieldSelection.select_fields(gql_type, skip_field, max_depth)
end
|
|
|
|
|
2020-11-26 07:09:48 -05:00
|
|
|
# Prefix a query with an explicit variable signature.
def with_signature(variables, query)
  sigs = variables.map(&:sig).join(', ')
  "query(#{sigs}) #{wrap_query(query)}"
end
|
|
|
|
|
|
|
|
# A fresh, uniquely-named query variable of the given GraphQL type.
def var(type)
  ::Graphql::Var.new(generate(:variable), type)
end
|
|
|
|
|
2020-12-01 07:09:17 -05:00
|
|
|
# Render a hash of arguments as a GraphQL argument string.
def attributes_to_graphql(arguments)
  ::Graphql::Arguments.new(arguments).to_s
end
|
|
|
|
|
2019-05-09 05:27:07 -04:00
|
|
|
# POST several queries in one multiplexed request.
def post_multiplex(queries, current_user: nil, headers: {})
  post api('/', current_user, version: 'graphql'), params: { _json: queries }, headers: headers
end
|
|
|
|
|
2021-07-01 17:08:38 -04:00
|
|
|
# GET several queries in one multiplexed request.
def get_multiplex(queries, current_user: nil, headers: {})
  path = "/?#{queries.to_query('_json')}"

  get api(path, current_user, version: 'graphql'), headers: headers
end
|
|
|
|
|
|
|
|
# POST a GraphQL query and fail fast on internal server errors.
def post_graphql(query, current_user: nil, variables: nil, headers: {}, token: {}, params: {})
  all_params = params.merge(query: query, variables: serialize_variables(variables))
  post api('/', current_user, version: 'graphql', **token), params: all_params, headers: headers

  return unless graphql_errors

  # Errors are acceptable, but not this one:
  expect(graphql_errors).not_to include(a_hash_including('message' => 'Internal server error'))
end
|
|
|
|
|
2021-07-01 17:08:38 -04:00
|
|
|
# GET a GraphQL query and fail fast on internal server errors.
def get_graphql(query, current_user: nil, variables: nil, headers: {}, token: {}, params: {})
  vars = "variables=#{CGI.escape(serialize_variables(variables))}" if variables
  extra_params = params.to_a.map { |k, v| v.to_query(k) }
  path = ["/?query=#{CGI.escape(query)}", vars, *extra_params].join('&')

  get api(path, current_user, version: 'graphql', **token), headers: headers

  return unless graphql_errors

  # Errors are acceptable, but not this one:
  expect(graphql_errors).not_to include(a_hash_including('message' => 'Internal server error'))
end
|
|
|
|
|
2021-04-28 11:09:35 -04:00
|
|
|
# POST a MutationDefinition (see #graphql_mutation).
def post_graphql_mutation(mutation, current_user: nil, token: {})
  post_graphql(
    mutation.query,
    current_user: current_user,
    variables: mutation.variables,
    token: token
  )
end
|
|
|
|
|
2020-09-02 05:10:23 -04:00
|
|
|
# POST a mutation whose variables contain file uploads, using the
# apollo multipart format via workhorse.
def post_graphql_mutation_with_uploads(mutation, current_user: nil)
  files = file_paths_in_mutation(mutation)
  upload_params = mutation_to_apollo_uploads_param(mutation, files: files)

  workhorse_post_with_file(
    api('/', current_user, version: 'graphql'),
    params: upload_params,
    file_key: '1'
  )
end
|
|
|
|
|
|
|
|
# Collect the variable paths of all uploaded files in a mutation.
def file_paths_in_mutation(mutation)
  [].tap { |paths| find_uploads(paths, [], mutation.variables) }
end
|
|
|
|
|
|
|
|
# Depth first search for UploadedFile values
|
|
|
|
# Depth first search for UploadedFile values; accumulates each file's
# key-path into `paths`.
def find_uploads(paths, path, value)
  if value.is_a?(Rack::Test::UploadedFile)
    paths << path
  elsif value.is_a?(Hash)
    value.each { |k, v| find_uploads(paths, path + [k], v) }
  elsif value.is_a?(Array)
    value.each_with_index { |v, i| find_uploads(paths, path + [i], v) }
  end
end
|
|
|
|
|
2019-09-13 09:26:31 -04:00
|
|
|
# this implements GraphQL multipart request v2
|
|
|
|
# https://github.com/jaydenseric/graphql-multipart-request-spec/tree/v2.0.0-alpha.2
|
|
|
|
# this is simplified and do not support file deduplication
|
|
|
|
# Implements GraphQL multipart request v2 (simplified; no file dedup):
# replaces each file value in the variables with nil and emits the
# operations/map/file params the server expects.
def mutation_to_apollo_uploads_param(mutation, files: [])
  operations = { 'query' => mutation.query, 'variables' => mutation.variables }
  map = {}
  extracted_files = {}

  files.each_with_index do |file_path, idx|
    apollo_idx = (idx + 1).to_s
    *parent_path, file_key = file_path

    parent = operations['variables']
    parent = parent.dig(*parent_path) unless parent_path.empty?

    extracted_files[apollo_idx] = parent[file_key]
    parent[file_key] = nil

    map[apollo_idx] = ["variables.#{file_path.join('.')}"]
  end

  { operations: operations.to_json, map: map.to_json }.merge(extracted_files)
end
|
|
|
|
|
2020-12-01 07:09:17 -05:00
|
|
|
# Re-parse the last response body (supports multiple requests per example).
def fresh_response_data
  Gitlab::Json.parse(response.body)
end
|
|
|
|
|
2019-07-29 15:25:41 -04:00
|
|
|
# Raises an error if no data is found
|
2021-02-11 13:09:10 -05:00
|
|
|
# NB: We use fresh_response_data to support tests that make multiple requests.
|
|
|
|
def graphql_data(body = fresh_response_data)
  data = body['data']
  # Surface the response's errors when there is no data at all.
  raise NoData, graphql_errors(body) unless data

  data
end
|
|
|
|
|
2020-01-22 19:08:53 -05:00
|
|
|
# Dig into the last response's data at the given path.
def graphql_data_at(*path)
  graphql_dig_at(graphql_data, *path)
end
|
|
|
|
|
2020-11-26 07:09:48 -05:00
|
|
|
# Slightly more powerful than just `dig`:
|
|
|
|
# - also supports implicit flat-mapping (.e.g. :foo :nodes :bar :nodes)
|
2020-04-24 17:09:48 -04:00
|
|
|
def graphql_dig_at(data, *path)
  # Integer segments are array indexes; everything else is camelized.
  keys = path.map { |segment| segment.is_a?(Integer) ? segment : GraphqlHelpers.fieldnamerize(segment) }

  # Allows for array indexing, like this
  # ['project', 'boards', 'edges', 0, 'node', 'lists']
  keys.inject(data) do |acc, key|
    if acc.is_a?(Array)
      next acc[key] if key.is_a?(Integer)

      # Implicit flat-map over array elements, preserving explicit nils.
      acc.compact.flat_map do |element|
        value = element[key]
        value.nil? ? [value] : Array.wrap(value)
      end
    else
      acc&.dig(key)
    end
  end
end
|
|
|
|
|
2021-11-12 01:10:23 -05:00
|
|
|
# Extract the errors from a regular or multiplexed GraphQL response body.
def graphql_errors(body = fresh_response_data)
  if body.is_a?(Hash) # regular query
    body['errors']
  elsif body.is_a?(Array) # multiplexed queries
    body.map { |response| response['errors'] }
  else
    raise "Unknown GraphQL response type #{body.class}"
  end
end
|
|
|
|
|
2019-08-22 10:17:38 -04:00
|
|
|
# Assert that the response errors include a message matching each regex.
def expect_graphql_errors_to_include(regexes_to_match)
  raise "No errors. Was expecting to match #{regexes_to_match}" if graphql_errors.nil? || graphql_errors.empty?

  error_messages = flattened_errors.map { |error_hash| error_hash["message"] }

  Array.wrap(regexes_to_match).flatten.each do |regex|
    expect(error_messages).to include a_string_matching regex
  end
end
|
|
|
|
|
|
|
|
# Assert the last response carried no GraphQL errors at all.
def expect_graphql_errors_to_be_empty
  expect(flattened_errors).to be_empty
end
|
|
|
|
|
2022-03-11 07:07:56 -05:00
|
|
|
# Helps migrate to the new GraphQL interpreter,
|
|
|
|
# https://gitlab.com/gitlab-org/gitlab/-/issues/210556
|
2022-06-01 14:09:44 -04:00
|
|
|
# Assert the block yields an instance of the given error class whose
# message matches (helps migrate to the new GraphQL interpreter).
def expect_graphql_error_to_be_created(error_class, match_message = '')
  result = yield

  expect(result).to be_instance_of(error_class)
  expect(result.message).to match(match_message)
end
|
|
|
|
|
2019-08-22 10:17:38 -04:00
|
|
|
# All errors from a (possibly multiplexed) response as one flat array.
def flattened_errors
  Array.wrap(graphql_errors).flatten.compact
end
|
|
|
|
|
2019-07-29 15:25:41 -04:00
|
|
|
# Raises an error if no response is found
|
2018-07-10 10:19:45 -04:00
|
|
|
# The named mutation's payload; `fetch` raises if the key is absent.
def graphql_mutation_response(mutation_name)
  graphql_data.fetch(GraphqlHelpers.fieldnamerize(mutation_name))
end
|
|
|
|
|
2020-05-21 20:08:07 -04:00
|
|
|
# Names of the argument-free scalar fields of a type.
def scalar_fields_of(type_name)
  # Idiom fix: `filter_map` replaces the map-with-`next` + `compact` pattern.
  GitlabSchema.types[type_name].fields.filter_map do |name, field|
    name unless nested_fields?(field) || required_arguments?(field)
  end
end
|
|
|
|
|
|
|
|
# [name, field] pairs for the argument-free nested (non-scalar) fields of a type.
def nested_fields_of(type_name)
  # Idiom fix: `filter_map` replaces the map-with-`next` + `compact` pattern.
  GitlabSchema.types[type_name].fields.filter_map do |name, field|
    [name, field] if nested_fields?(field) && !required_arguments?(field)
  end
end
|
|
|
|
|
2018-06-26 12:31:05 -04:00
|
|
|
def nested_fields?(field)
  inspection = ::Graphql::FieldInspection.new(field)
  inspection.nested_fields?
end
|
|
|
|
|
2018-05-21 03:52:24 -04:00
|
|
|
def scalar?(field)
  inspection = ::Graphql::FieldInspection.new(field)
  inspection.scalar?
end
|
|
|
|
|
2018-06-26 12:31:05 -04:00
|
|
|
def enum?(field)
  inspection = ::Graphql::FieldInspection.new(field)
  inspection.enum?
end
|
|
|
|
|
2020-11-27 07:09:14 -05:00
|
|
|
# There are a few non BaseField fields in our schema (pageInfo for one).
# None of them require arguments.
def required_arguments?(field)
  return field.requires_argument? if field.is_a?(::Types::BaseField)

  meta = field.try(:metadata)

  if meta && meta[:type_class]
    # Recurse into the wrapped type class for legacy field wrappers.
    required_arguments?(meta[:type_class])
  else
    args = field.try(:arguments)
    args ? args.values.any? { |argument| argument.type.non_null? } : false
  end
end
|
|
|
|
|
2019-04-22 12:07:19 -04:00
|
|
|
# Whether the value (or any element of an array value) is IO-like,
# i.e. responds to #to_io (e.g. an upload).
def io_value?(value)
  Array.wrap(value).any? { |item| item.respond_to?(:to_io) }
end
|
|
|
|
|
2018-05-21 03:52:24 -04:00
|
|
|
# The unwrapped type of the given field.
def field_type(field)
  inspection = ::Graphql::FieldInspection.new(field)
  inspection.type
end
|
2019-03-27 16:02:25 -04:00
|
|
|
|
|
|
|
# For most tests we want to allow unlimited query complexity, so stub
# out both the instance- and class-level complexity limits.
def allow_unlimited_graphql_complexity
  allow_any_instance_of(GitlabSchema).to receive(:max_complexity).and_return(nil)
  allow(GitlabSchema).to receive(:max_query_complexity).with(any_args).and_return(nil)
end
|
2019-05-06 10:00:03 -04:00
|
|
|
|
|
|
|
# Lifts the query depth limit by stubbing both the instance- and
# class-level depth settings.
def allow_unlimited_graphql_depth
  allow_any_instance_of(GitlabSchema).to receive(:max_depth).and_return(nil)
  allow(GitlabSchema).to receive(:max_query_depth).with(any_args).and_return(nil)
end
|
2019-08-27 23:47:29 -04:00
|
|
|
|
|
|
|
# Raises the recursion-analyzer threshold so deeply recursive queries
# are not rejected in specs.
def allow_high_graphql_recursion
  allow_any_instance_of(Gitlab::Graphql::QueryAnalyzers::AST::RecursionAnalyzer)
    .to receive(:recursion_threshold).and_return(1000)
end
|
2019-11-15 07:06:12 -05:00
|
|
|
|
2020-01-22 16:08:48 -05:00
|
|
|
# Raises the query-counting transaction threshold so query-heavy specs
# do not trip the limiter.
def allow_high_graphql_transaction_threshold
  stub_const("Gitlab::QueryLimiting::Transaction::THRESHOLD", 1000)
end
|
|
|
|
|
2021-12-06 22:12:22 -05:00
|
|
|
# Raises the controller's maximum accepted query size for specs that
# submit very large query documents.
def allow_high_graphql_query_size
  stub_const('GraphqlController::MAX_QUERY_SIZE', 10_000_000)
end
|
|
|
|
|
2019-11-15 07:06:12 -05:00
|
|
|
# Unwraps the 'node' entries from an array of edges. When
# +extract_attribute+ is given, plucks that attribute from each node
# instead of returning the node hash itself.
def node_array(data, extract_attribute = nil)
  data.map do |edge|
    node = edge['node']
    extract_attribute ? node[extract_attribute] : node
  end
end
|
2019-11-21 13:06:26 -05:00
|
|
|
|
2022-05-05 11:08:47 -04:00
|
|
|
# Builds a global ID, either straight from +model+ or from explicitly
# supplied parts (+id+ / +model_name+), falling back to the model for
# whichever part is missing.
def global_id_of(model = nil, id: nil, model_name: nil)
  return model.to_global_id unless id || model_name

  ::Gitlab::GlobalId.as_global_id(id || model.id, model_name: model_name || model.class.name)
end
|
2020-01-08 16:08:08 -05:00
|
|
|
|
|
|
|
# Matcher for the error reported when a required argument is omitted
# at the given query path.
def missing_required_argument(path, argument)
  extensions = a_hash_including('code' => 'missingRequiredArguments', 'arguments' => argument.to_s)

  a_hash_including(
    'path' => ['query'] + path,
    'extensions' => extensions
  )
end
|
|
|
|
|
|
|
|
# Matcher for a GraphQL error with the given path and message.
def custom_graphql_error(path, msg)
  a_hash_including('message' => msg, 'path' => path)
end
|
2020-01-30 16:08:47 -05:00
|
|
|
|
|
|
|
# Builds an anonymous object type (graphql_name 'TestType') with a
# single nullable String field `name`, yielding the class so callers
# can add more fields.
def type_factory
  Class.new(Types::BaseObject) do
    graphql_name 'TestType'

    field :name, GraphQL::Types::String, null: true

    yield(self) if block_given?
  end
end
|
|
|
|
|
|
|
|
# Builds an anonymous query root type (graphql_name 'TestQuery'),
# yielding the class so callers can define fields on it.
def query_factory
  Class.new(Types::BaseObject) do
    graphql_name 'TestQuery'

    yield(self) if block_given?
  end
end
|
|
|
|
|
2022-05-02 14:10:57 -04:00
|
|
|
# Executes a query against a schema; assumes `query_string` and `user`
# to be let-bound in the current context.
#
# When +raise_on_error+ is true, any errors in the result raise NoData.
def execute_query(query_type = Types::QueryType, schema: empty_schema, graphql: query_string, raise_on_error: false, variables: {})
  schema.query(query_type)

  result = schema.execute(
    graphql,
    context: { current_user: user },
    variables: variables
  )

  errors = result.to_h['errors']
  raise NoData, errors if raise_on_error && errors.present?

  result
end
|
2020-11-26 07:09:48 -05:00
|
|
|
|
2021-03-03 04:10:53 -05:00
|
|
|
# Builds an anonymous GraphQL::Schema configured with the pagination
# connections, batch loading and lazy resolution used elsewhere in the
# helpers (see execute_query).
def empty_schema
  Class.new(GraphQL::Schema) do
    use Gitlab::Graphql::Pagination::Connections
    use BatchLoader::GraphQL

    lazy_resolve ::Gitlab::Graphql::Lazy, :force
  end
end
|
|
|
|
|
2022-05-05 11:08:47 -04:00
|
|
|
# Wrapper around a_hash_including that supports unpacking with **
class UnpackableMatcher < SimpleDelegator
  include RSpec::Matchers

  # Exposing the expectation hash as #to_hash is what makes instances
  # splattable with **.
  attr_reader :to_hash

  def initialize(hash)
    @to_hash = hash

    super(a_hash_including(hash))
  end

  # Serialize the raw expectation hash rather than the matcher.
  def to_json(_opts = {})
    to_hash.to_json
  end

  def as_json(opts = {})
    to_hash.as_json(opts)
  end
end
|
|
|
|
|
|
|
|
# Construct a matcher for GraphQL entity response objects, of the form
# `{ "id" => "some-gid" }`.
#
# Usage:
#
# ```ruby
# expect(graphql_data_at(:path, :to, :entity)).to match a_graphql_entity_for(user)
# ```
#
# This can be called as:
#
# ```ruby
# a_graphql_entity_for(project, :full_path) # also checks that `entity['fullPath'] == project.full_path`
# a_graphql_entity_for(project, full_path: 'some/path') # same as above, with explicit values
# a_graphql_entity_for(user, :username, foo: 'bar') # combinations of the above
# a_graphql_entity_for(foo: 'bar') # if properties are defined, the model is not necessary
# ```
#
# Note that the model instance must not be nil, unless some properties are
# explicitly passed in. The following are rejected with `ArgumentError`:
#
# ```
# a_graphql_entity_for(nil, :username)
# a_graphql_entity_for(:username)
# a_graphql_entity_for
# ```
#
def a_graphql_entity_for(model = nil, *fields, **attrs)
  raise ArgumentError, 'model is nil' if model.nil? && fields.any?

  expected = attrs.transform_keys { |key| GraphqlHelpers.fieldnamerize(key) }
  expected['id'] = global_id_of(model).to_s if model

  fields.each do |field|
    expected[GraphqlHelpers.fieldnamerize(field)] = model.public_send(field)
  end

  raise ArgumentError, 'no attributes' if expected.empty?

  UnpackableMatcher.new(expected)
end
|
|
|
|
|
2020-11-26 07:09:48 -05:00
|
|
|
# A lookahead that selects everything
def positive_lookahead
  selection = double(selects?: true)

  allow(selection).to receive(:selection).and_return(selection)
  allow(selection).to receive(:selections).and_return(selection)
  allow(selection).to receive(:map).and_return(double(include?: true))

  selection
end
|
|
|
|
|
|
|
|
# A lookahead that selects nothing
def negative_lookahead
  selection = double(selects?: false)

  allow(selection).to receive(:selection).and_return(selection)

  selection
end
|
2021-03-03 04:10:53 -05:00
|
|
|
|
|
|
|
private
|
|
|
|
|
|
|
|
# Coerces either a field object or a field name into a field looked up
# on the given object type.
def to_base_field(name_or_field, object_type)
  if name_or_field.is_a?(::Types::BaseField)
    name_or_field
  else
    field_by_name(name_or_field, object_type)
  end
end
|
|
|
|
|
|
|
|
# Looks up a field on the object type by its camelized name; raises
# ArgumentError when no such field exists.
def field_by_name(name, object_type)
  name = ::GraphqlHelpers.fieldnamerize(name)
  field = object_type.fields[name]

  field || raise(ArgumentError, "Unknown field #{name} for #{described_class.graphql_name}")
end
|
2017-08-16 09:04:41 -04:00
|
|
|
end
|