diff --git a/CHANGELOG.md b/CHANGELOG.md index dbf078d..b1e7d1b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,12 +10,30 @@ the compatibility issues you are likely to encounter. ## [Unreleased] +### Compatibility + +* The error code `param_mismatch` was removed in favor of + `type_mismatch` due to a rewrite of error + handling in the type checker. +* The error code `non_null` will now report `type_mismatch` instead. +* The error code `enum_not_found` will now be reported as + `unknown_enum`. +* If a string literal is given in place of an enum, the error code + will now be `enum_string_literal` rather than `enum_not_found`. + ### Added * Add proper support for OTP release 21 (by getong, 18年梦醒). Detect the OTP 21 version, and if present, use the new stack-trace form. This ensures backwards compatibility as well as proper stack trace handling in new OTP releases. +* New command `graphql:map/2`. Given a `Result` of the form `{ok, Val} | + {defer, Token}` the call to `graphql:map(F, Result)` will apply `F` + to the result. Either now, or in the case of a defer, when the defer + completes. This yields an alternative way to handle events which + cannot be completed right away. Long-running work is usually better + handled in a spawned process, but simpler changes can be handled + within the context of the GraphQL process. * New command `graphql:sync/3`. Calling `graphql:sync(Ctx, Pid, Msg)` will place a message into the GraphQL mailbox. When this message occurs, we will send `Pid` a message `Msg`. This is useful for e.g., @@ -28,6 +46,11 @@ the compatibility issues you are likely to encounter. ### Fixed +* Default variable value expansion has been fixed. If you have a + situation where a field-arg has a default value, and you have supplied a + parameter for that value, say `foo(x: $k)` and you then omit `$k` in + your query, the underlying default (in this case the default for + `x`) is now picked up properly. 
* Re-instate the operation type in the callers context * Remove the occurrence of fragment names in `path` components of errors. These are not allowed per the Jun2018 specification and diff --git a/README.md b/README.md index 08ae743..bf3b307 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ specification, except for a few areas: In addition, we are working towards June 2018 compliance. We already implemented many of the changes in the system. But we are still missing some parts. The implementation plan is on a demand driven -basis for Shopgun currently, in that we tend to implement things are +basis for Shopgun currently, in that we tend to implement things when there is a need for them. # Documentation diff --git a/src/graphql.erl b/src/graphql.erl index 99c7201..167b0cf 100644 --- a/src/graphql.erl +++ b/src/graphql.erl @@ -1,10 +1,11 @@ -module(graphql). - -include_lib("graphql/include/graphql.hrl"). -include("graphql_internal.hrl"). -include("graphql_schema.hrl"). +-compile({no_auto_import, [monitor/2]}). + %% GraphQL Documents -export([ parse/1, @@ -26,7 +27,9 @@ %% Deferred execution -export([ token/1, reply_cast/2, - sync/3 + sync/3, + monitor/2, + map/2 ]). %% Schema Definitions @@ -47,6 +50,10 @@ -export_type([json/0, param_context/0]). -type token() :: {'$graphql_token', pid(), reference(), reference()}. +-type defer_map() :: #{ worker => pid(), + timeout => non_neg_integer(), + apply => [fun()]}. +-type result() :: {ok, term()} | {error, term()} | {defer, token()} | {defer, token(), defer_map()}. -type name() :: {name, pos_integer(), binary()} | binary(). -type document() :: #document{}. -type directive() :: #directive{}. @@ -74,6 +81,18 @@ token(#{ defer_process := Proc, defer_request_id := ReqId }) -> sync(#{ defer_process := Proc, defer_request_id := ReqId }, Pid, Msg) -> Proc ! {'$graphql_sync', ReqId, Pid, Msg}. +-spec monitor(pid(), result()) -> result(). 
+monitor(_Worker, {ok, Value}) -> {ok, Value}; +monitor(_Worker, {error, Reason}) -> {error, Reason}; +monitor(Worker, {defer, Token}) -> monitor(Worker, {defer, Token, #{}}); +monitor(Worker, {defer, Token, Map}) when is_pid(Worker) -> {defer, Token, Map#{ worker => Worker}}. + +map(F, {ok, Value}) -> F({ok, Value}); +map(F, {error, Reason}) -> F({error, Reason}); +map(F, {defer, Token}) -> map(F, {defer, Token, #{}}); +map(F, {defer, Token, #{ apply := App} = M}) -> {defer, Token, M#{ apply := queue:in(F, App)}}; +map(F, {defer, Token, #{} = M}) -> {defer, Token, M#{ apply => queue:in(F, queue:new())}}. + %% @private token_ref({'$graphql_token', _, _, Ref}) -> Ref. diff --git a/src/graphql_check.erl b/src/graphql_check.erl index 429b723..c8eaae4 100644 --- a/src/graphql_check.erl +++ b/src/graphql_check.erl @@ -38,7 +38,7 @@ %%% Algorithm: %%% %%% We use a bidirectional type checker. In general we handle two kinds of -%%% typing constructs: G |- e => t (inference) and G |- e <= t,e' (checking) +%%% typing constructs: G |- e ==> t (inference) and G |- e <= t,e' (checking) %%% The first of these gets G,e as inputs and derives a t. The second form %%% gets G, e, and t as inputs and derives e' which is an e annotated with %%% more information. @@ -61,7 +61,14 @@ { path = [] :: [any()], vars = #{} :: #{ binary() => #vardef{} }, - frags = #{} :: #{ binary() => #frag{} } + frags = #{} :: #{ binary() => #frag{} }, + + %% Current subcontext we are checking under. We are either + %% running in the "query context" of an input query to the system + %% or in the "variable context" of variables given as a JSON structure. + %% + %% The latter has different handling rules w.r.t, enumerated types + sub_context = query :: query | variable }). -type ctx() :: #ctx{}. -type polarity() :: '+' | '-' | '*'. 
@@ -240,20 +247,39 @@ check_args_(_Ctx, [], [], Acc) -> {ok, Acc}; check_args_(Ctx, [_|_] = Args, [], _Acc) -> err(Ctx, {excess_args, Args}); -check_args_(Ctx, Args, [{N, #schema_arg { ty = TyName }} = SArg | Next], Acc) -> +check_args_(Ctx, Args, [{N, #schema_arg { ty = ArgTy, + default = Default }}| Next], Acc) -> CtxP = add_path(Ctx, N), - {ok, Sigma} = infer_input_type(Ctx, TyName), + {ok, Sigma} = infer_input_type(Ctx, ArgTy), - {ok, {_, #{ type := ArgTy, value := Val}}, NextArgs} = - take_arg(CtxP, SArg, Args), - {ok, Tau} = infer_input_type(Ctx, ArgTy), - - %% Verify type compabitility - ok = sub_input(CtxP, Tau, Sigma), - Res = case check_value(CtxP, Val, Tau) of - {ok, RVal} -> {N, #{ type => Tau, value => RVal}} - end, - check_args_(Ctx, NextArgs, Next, [Res|Acc]). + case lists:keytake(N, 1, Args) of + {value, {_, null}, _} -> + %% You are currently not allowed to input null values + err(CtxP, {null_input, N}); + {value, {_, Val}, RemainingArgs} -> + %% Found argument with value Val + Res = case check_value(CtxP, Val, Sigma) of + {ok, #var{} = Var} -> + {N, #{ type => Sigma, + value => Var#var { default = Default }}}; + {ok, RVal} -> + {N, #{ type => Sigma, + value => RVal}} + end, + check_args_(Ctx, RemainingArgs, Next, [Res|Acc]); + false -> + case {Sigma, Default} of + {{non_null, _}, undefined} -> + err(Ctx, missing_non_null_param); + {{non_null, _}, null} -> + err(Ctx, missing_non_null_param); + _ -> + {ok, Coerced} = coerce_default_param(CtxP, Default, Sigma), + Res = {N, #{ type => Sigma, + value => Coerced }}, + check_args_(Ctx, Args, Next, [Res|Acc]) + end + end. check_directive(Ctx, Context, #directive{ args = Args, id = ID} = D, #directive_type { args = SArgs, locations = Locations } = Ty) -> @@ -287,19 +313,30 @@ check_directives(Ctx, OpType, Dirs) -> %% Judge a type and a value. Used to verify a type judgement of the %% form 'G |- v <= T,e'' for a value 'v' and a type 'T'. 
Analysis has shown that %% it is most efficient to make the case analysis follow 'v' over 'T'. +check_value(Ctx, Val, Ty) when is_binary(Ty) -> + {ok, Sigma} = infer_input_type(Ctx, Ty), + check_value(Ctx, Val, Sigma); check_value(Ctx, {name, _, N}, Sigma) -> check_value(Ctx, N, Sigma); check_value(Ctx, {var, ID}, Sigma) -> CtxP = add_path(Ctx, {var, ID}), {ok, #vardef { ty = Tau}} = infer(Ctx, {var, ID}), ok = sub_input(CtxP, Tau, Sigma), - {ok, {var, ID, Tau}}; + {ok, #var { id = ID, ty = Tau }}; +check_value(Ctx, undefined, {non_null, _} = Sigma) -> + err(Ctx, {type_mismatch, + #{ document => undefined, + schema => Sigma }}); check_value(Ctx, null, {non_null, _} = Sigma) -> err(Ctx, {type_mismatch, #{ document => null, schema => Sigma }}); check_value(Ctx, Val, {non_null, Sigma}) -> check_value(Ctx, Val, Sigma); +check_value(_Ctx, undefined, _Sigma) -> + %% Values not given are currently defaulted to the value null + %% @todo: Lift this curse + {ok, null}; check_value(_Ctx, null, _Sigma) -> %% Null values are accepted in every other context {ok, null}; @@ -325,6 +362,13 @@ check_value(Ctx, {enum, N}, #enum_type { id = ID } = Sigma) -> #{ document => Others, schema => Sigma }}) end; +check_value(Ctx, Obj, #input_object_type{} = Tau) when is_map(Obj) -> + %% When an object comes in through JSON for example, then the input object + %% will be a map which is already unique in its fields. To handle this, turn + %% the object into the same form as the one we use on query documents and pass + %% it on. Note that the code will create a map later on once the input has been + %% uniqueness-checked. 
+ check_value(Ctx, {input_object, maps:to_list(Obj)}, Tau); check_value(Ctx, {input_object, _} = InputObj, Sigma) -> case Sigma of #input_object_type{} -> @@ -336,12 +380,15 @@ check_value(Ctx, {input_object, _} = InputObj, Sigma) -> end; check_value(Ctx, Val, #scalar_type{} = Sigma) -> coerce(Ctx, Val, Sigma); -check_value(Ctx, String, #enum_type{}) when is_binary(String) -> +check_value(#ctx { sub_context = query } = Ctx, String, #enum_type{}) when is_binary(String) -> %% The spec (Jun2018, section 3.9 - Input Coercion) says that this %% is not allowed, unless given as a parameter. In this case, it %% is not given as a parameter, but is expanded in as a string in %% a query document. Reject. err(Ctx, enum_string_literal); +check_value(#ctx { sub_context = variable } = Ctx, String, #enum_type{} = Tau) when is_binary(String) -> + %% In the case of a sub context for variables, we are allowed to handle the case + check_value(Ctx, {enum, String}, Tau); check_value(Ctx, Val, #enum_type{} = Sigma) -> coerce(Ctx, Val, Sigma); check_value(Ctx, Val, Sigma) -> @@ -370,30 +417,34 @@ check_input_obj_(Ctx, Obj, [], Acc) -> 0 -> Acc; K when K > 0 -> err(Ctx, {excess_fields_in_object, Obj}) end; -%% @todo: Clearly this has to change because Ty isn't known at this check_input_obj_(Ctx, Obj, [{Name, #schema_arg { ty = Ty, default = Default }} | Next], Acc) -> - Result = case maps:get(Name, Obj, not_found) of - not_found -> - check_input_obj_null(add_path(Ctx, Name), Default, Ty); - V -> - CtxP = add_path(Ctx, Name), - {ok, Tau} = infer_input_type(CtxP, Ty), - {ok, R} = check_param(CtxP, V, Tau), - R - end, + CtxP = add_path(Ctx, Name), + {ok, Result} = + case maps:get(Name, Obj, not_found) of + not_found -> + case check_not_found(CtxP, Ty, Default) of + undefined -> + coerce_default_param(CtxP, null, Ty); + default -> + coerce_default_param(CtxP, Default, Ty) + end; + V -> + {ok, Tau} = infer_input_type(CtxP, Ty), + case check_value(CtxP, V, Tau) of + {ok, #var{} = Var} -> + {ok, 
Coerced} = coerce_default_param(CtxP, Default, Ty), + {ok, Var#var { default = Coerced }}; + {ok, Res} -> + {ok, Res} + end + end, check_input_obj_(Ctx, maps:remove(Name, Obj), Next, Acc#{ Name => Result }). -check_input_obj_null(Ctx, null, {non_null, _}) -> - err(Ctx, missing_non_null_param); -check_input_obj_null(Ctx, Default, Ty) -> - {ok, R} = coerce_default_param(Ctx, Default, Ty), - R. - check_sset(Ctx, [], Ty) -> case Ty of #object_type{} -> err(Ctx, fieldless_object); @@ -537,7 +588,8 @@ check_params(FunEnv, OpName, Params) -> err(#ctx{}, {operation_not_found, OpName}); VarEnv -> Ctx = #ctx { vars = VarEnv, - path = [OpName] }, + path = [OpName], + sub_context = variable }, check_params_(Ctx, Params) end catch throw:{error, Path, Msg} -> @@ -550,81 +602,34 @@ check_params(FunEnv, OpName, Params) -> %% type is valid. check_params_(#ctx { vars = VE } = Ctx, OrigParams) -> F = fun - (Key, Tau, Parameters) -> - {ok, Val} = check_param(add_path(Ctx, Key), - maps:get(Key, Parameters, not_found), - Tau), - Parameters#{ Key => Val } + (Key, #vardef { ty = Tau, default = Default}, Parameters) -> + CtxP = add_path(Ctx, Key), + case maps:get(Key, Parameters, not_found) of + not_found -> + case check_not_found(CtxP, Tau, Default) of + undefined -> + Parameters; + default -> + {ok, Res} = coerce_default_param(CtxP, Default, Tau), + Parameters#{ Key => Res } + end; + Value -> + {ok, Res} = check_value(CtxP, Value, Tau), + Parameters#{ Key => Res } + end end, maps:fold(F, OrigParams, VE). -%% When checking parameters, we must consider the case of default values. -%% If a given parameter is not given, and there is a default, we can supply -%% the default value in some cases. The spec requires special handling of -%% null values, which are handled here. 
-check_param(Ctx, not_found, Tau) -> - case Tau of - #vardef { ty = {non_null, _}, default = null } -> - err(Ctx, missing_non_null_param); - #vardef { default = Default, ty = Ty } -> - coerce_default_param(Ctx, Default, Ty) - end; -check_param(Ctx, Val, #vardef { ty = Tau }) -> - check_param_(Ctx, Val, Tau); -check_param(Ctx, Val, Tau) -> - check_param_(Ctx, Val, Tau). - -%% Lift types up if needed -check_param_(Ctx, Val, Ty) when is_binary(Ty) -> - {ok, Tau} = infer_input_type(Ctx, Ty), - check_param_(Ctx, Val, Tau); -check_param_(Ctx, {var, ID}, Sigma) -> - CtxP = add_path(Ctx, {var, ID}), - {ok, #vardef { ty = Tau}} = infer(Ctx, {var, ID}), - ok = sub_input(CtxP, Tau, Sigma), - {ok, {var, ID, Tau}}; -check_param_(Ctx, null, {non_null, _}) -> - err(Ctx, non_null); -check_param_(Ctx, Val, {non_null, Tau}) -> - %% Here, the value cannot be null due to the preceeding clauses - check_param_(Ctx, Val, Tau); -check_param_(_Ctx, null, _Tau) -> - {ok, null}; -check_param_(Ctx, Lst, {list, Tau}) when is_list(Lst) -> - %% Build a dummy structure to match the recursor. Unwrap this - %% structure before replacing the list parameter. 
- %% - %% @todo: Track the index here - {ok, [begin - {ok, V} = check_param_(Ctx, X, Tau), - V - end || X <- Lst]}; -check_param_(Ctx, Val, #scalar_type{} = Tau) -> - coerce(Ctx, Val, Tau); -check_param_(Ctx, {enum, Val}, #enum_type{} = Tau) when is_binary(Val) -> - check_param_(Ctx, Val, Tau); -check_param_(Ctx, Val, #enum_type { id = Ty } = Tau) when is_binary(Val) -> - %% Determine the type of any enum term, and then coerce it - case graphql_schema:validate_enum(Ty, Val) of - ok -> - coerce(Ctx, Val, Tau); - not_found -> - err(Ctx, {enum_not_found, Ty, Val}); - {other_enums, OtherTys} -> - err(Ctx, {param_mismatch, {enum, Ty, OtherTys}}) - end; -check_param_(Ctx, Obj, #input_object_type{} = Tau) when is_map(Obj) -> - %% When an object comes in through JSON for example, then the input object - %% will be a map which is already unique in its fields. To handle this, turn - %% the object into the same form as the one we use on query documents and pass - %% it on. Note that the code will create a map later on once the input has been - %% uniqueness-checked. - check_param_(Ctx, {input_object, maps:to_list(Obj)}, Tau); -check_param_(Ctx, {input_object, KVPairs}, #input_object_type{} = Tau) -> - check_input_obj(Ctx, {input_object, KVPairs}, Tau); - %% Everything else are errors -check_param_(Ctx, Val, Tau) -> - err(Ctx, {param_mismatch, Val, Tau}). +%% Handle the case where the parameter isn't found in the system +%% In this case, we handle nullability through this matching rule set +check_not_found(Ctx, {non_null, _}, null) -> + err(Ctx, missing_non_null_param); +check_not_found(Ctx, {non_null, _}, undefined) -> + err(Ctx, missing_non_null_param); +check_not_found(_Ctx, _Tau, undefined) -> + undefined; +check_not_found(_Ctx, _Tau, _Default) -> + default. %% -- SUBTYPE/SUBSUMPTION ------------------------------------------------------ %% @@ -802,8 +807,10 @@ coerce_name(Name) -> graphql_ast:name(Name). %% type checking on the default values in the schema type checker. 
%% There is absolutely no reason to do something like this then since %% it can never fail like this. +coerce_default_param(#ctx { }, undefined, _Ty) -> + {ok, undefined}; coerce_default_param(#ctx { path = Path } = Ctx, Default, Ty) -> - try check_param(Ctx, Default, Ty) of + try check_value(Ctx, Default, Ty) of Result -> Result catch Class:Err -> @@ -812,7 +819,7 @@ coerce_default_param(#ctx { path = Path } = Ctx, Default, Ty) -> {default_value, Default}, {type, graphql_err:format_ty(Ty)}, {default_coercer_error, Class, Err}]), - err(Path, non_coercible_default) + err(Ctx, non_coercible_default) end. coerce(Ctx, Val, #enum_type { id = ID, resolve_module = ResolveMod }) -> @@ -893,28 +900,6 @@ directive_location(#op { ty = Ty }) -> {subscription, _} -> 'SUBSCRIPTION' end. -%% Pull out a value from a list of arguments. This is used to check -%% we eventually cover all arguments properly since we can check if there -%% are excess arguments in the end. -take_arg(Ctx, {Key, #schema_arg { ty = Tau, - default = Default }}, Args) -> - case lists:keytake(Key, 1, Args) of - {value, {_, null}, _NextArgs} -> - %% You are currently not allowed to input null values - err(Ctx, {null_input, Key}); - {value, {_, Val}, NextArgs} -> - %% Argument found, use it - {ok, {Key, #{ type => Tau, value => Val}}, NextArgs}; - false -> - %% Argument was not given. Resolve default value if any - case {Tau, Default} of - {{non_null, _}, null} -> - err(Ctx, missing_non_null_param); - _ -> - {ok, {Key, #{ type => Tau, value => Default}}, Args} - end - end. 
- %% Determine the operation whih the call wants to run operation(FunEnv, <<>>, Params) -> %% Supplying an empty string is the same as not supplying anything at all diff --git a/src/graphql_execute.erl b/src/graphql_execute.erl index d107001..6ac0428 100644 --- a/src/graphql_execute.erl +++ b/src/graphql_execute.erl @@ -397,7 +397,7 @@ execute_field(#ectx{ op_type = OpType, execute_field_await(Ctx, ElaboratedTy, Fields, Ref); {defer, Token, undefined} -> Monitor = undefined, - field_closure(Ctx, ElaboratedTy, Fields, Token, Monitor, DT); + field_closure(Ctx, ElaboratedTy, Fields, Token, Monitor, DT, queue:new()); {defer, Token, DeferStateMap} when is_map(DeferStateMap) -> defer_field_closure(Ctx, ElaboratedTy, Fields, Token, DeferStateMap); ResolvedValue -> @@ -418,11 +418,17 @@ defer_field_closure(#ectx{ defer_target = _Upstream, ElaboratedTy, Fields, Token, DeferStateMap) -> TimeOut = maps:get(timeout, DeferStateMap, DT), Worker = maps:get(worker, DeferStateMap, undefined), + ApplyChain = maps:get(apply, DeferStateMap, queue:new()), Monitor = build_monitor(Worker), - field_closure(Ctx, ElaboratedTy, Fields, Token, Monitor, TimeOut). + field_closure(Ctx, ElaboratedTy, Fields, Token, Monitor, TimeOut, ApplyChain). 
field_closure(#ectx{ defer_target = Upstream } = Ctx, - ElaboratedTy, Fields, Token, Monitor, TimeOut) -> + ElaboratedTy, + Fields, + Token, + Monitor, + TimeOut, + ApplyChain) -> Ref = graphql:token_ref(Token), Closure = fun @@ -443,24 +449,31 @@ field_closure(#ectx{ defer_target = Upstream } = Ctx, }; (ResolverResult) -> remove_monitor(Monitor), - ResVal = handle_resolver_result(ResolverResult), - case complete_value(Ctx, ElaboratedTy, Fields, ResVal) of - {ok, Result, Errs} -> - #done { upstream = Upstream, - key = Ref, - cancel = [], - demonitor = Monitor, - result = {ok, Result, Errs} }; - {error, Errs} -> - #done { upstream = Upstream, - key = Ref, - cancel = [], - demonitor = Monitor, - result = {error, Errs} }; - #work { items = Items, demonitors = Ms } = Wrk -> + case apply_chain(ResolverResult, queue:to_list(ApplyChain)) of + {go, AppliedResult} -> + ResVal = handle_resolver_result(AppliedResult), + case complete_value(Ctx, ElaboratedTy, Fields, ResVal) of + {ok, Result, Errs} -> + #done { upstream = Upstream, + key = Ref, + cancel = [], + demonitor = Monitor, + result = {ok, Result, Errs} }; + {error, Errs} -> + #done { upstream = Upstream, + key = Ref, + cancel = [], + demonitor = Monitor, + result = {error, Errs} }; + #work { items = Items, demonitors = Ms } = Wrk -> + NewRef = upstream_ref(Items), + Wrk#work { change_ref = {Upstream, Ref, NewRef}, + demonitors = [Monitor] ++ Ms} + end; + {defer, NewToken, DeferState} -> + #work { items = Items } = Wrk = defer_field_closure(Ctx, ElaboratedTy, Fields, NewToken, DeferState), NewRef = upstream_ref(Items), - Wrk#work { change_ref = {Upstream, Ref, NewRef}, - demonitors = [Monitor] ++ Ms} + Wrk#work { change_ref = {Upstream, Ref, NewRef}} end end, #work { items = [{Ref, Closure}], @@ -471,6 +484,21 @@ field_closure(#ectx{ defer_target = Upstream } = Ctx, {M, _} -> #{ M => Ref } end }. 
+apply_chain(Val, []) -> + {go, Val}; +apply_chain(Val, [F|Fs]) -> + case F(Val) of + {ok, _Val} = Ok -> apply_chain(Ok, Fs); + {error, _Reason} = Error -> apply_chain(Error, Fs); + {defer, Token} -> + {defer, Token, #{ apply => queue:from_list(Fs) }}; + {defer, Token, #{ apply := ToApply} = M} -> + %% Insert the rest of the chain at the front in reverse + %% order, so the item deepest in the list goes in first. + NewQueue = lists:foldr(fun queue:in_r/2, Fs, ToApply), + {defer, Token, M#{ apply := NewQueue }} + end. + report_wrong_return(Obj, Name, Fun, Val) -> error_logger:error_msg( "Resolver ~p.~p returned wrong value: ~p(..) -> ~p", @@ -928,27 +956,40 @@ resolve_args_(Ctx, [{ID, Val} | As], Acc) -> %% %% For a discussion about the Pet -> [Pet] coercion in the %% specification, see (Oct2016 Section 3.1.7) -var_coerce(S, T, V) when is_binary(S) -> - X = graphql_schema:lookup(S), - var_coerce(X, T, V); -var_coerce(S, T, V) when is_binary(T) -> - X = graphql_schema:lookup(T), - var_coerce(S, X, V); -var_coerce(Tau, Tau, Value) -> Value; +var_coerce(Tau, Sigma, V) when is_binary(Sigma) -> + X = graphql_schema:lookup(Sigma), + var_coerce(Tau, X, V); +var_coerce(Tau, Sigma, V) when is_binary(Tau) -> + X = graphql_schema:lookup(Tau), + var_coerce(X, Sigma, V); +var_coerce(Refl, Refl, Value) -> Value; var_coerce({non_null, Tau}, {non_null, Sigma}, Value) -> var_coerce(Tau, Sigma, Value); var_coerce({non_null, Tau}, Tau, Value) -> Value; +var_coerce({list, Tau}, {list, Sigma}, Values) -> + var_coerce(Tau, Sigma, Values); var_coerce(Tau, {list, SType}, Value) -> [var_coerce(Tau, SType, Value)]. %% Produce a valid value for an argument. value(Ctx, {Ty, Val}) -> value(Ctx, Ty, Val); value(Ctx, #{ type := Ty, value := Val }) -> value(Ctx, Ty, Val). 
-value(#ectx{ params = Params } = _Ctx, SType, {var, ID, DType}) -> +value(#ectx{ params = Params }, SType, #var { id = ID, ty = DType, + default = Default }) -> %% Parameter expansion and type check is already completed %% at this stage - Value = maps:get(name(ID), Params), - var_coerce(DType, SType, Value); + case maps:get(name(ID), Params, not_found) of + not_found -> + case Default of + %% Coerce undefined values to "null" + undefined -> var_coerce(DType, SType, null); + _ -> var_coerce(DType, SType, Default) + end; + Value -> + var_coerce(DType, SType, Value) + end; +value(_Ctx, _Ty, undefined) -> + null; value(_Ctx, _Ty, null) -> null; value(Ctx, {non_null, Ty}, Val) -> diff --git a/src/graphql_internal.hrl b/src/graphql_internal.hrl index 47b21fd..61cb6a7 100644 --- a/src/graphql_internal.hrl +++ b/src/graphql_internal.hrl @@ -28,7 +28,9 @@ -record(field, { id :: graphql:name(), - args = [] :: [any()], + args = [] :: [#{ type := graphql_type(), + value := value(), + default := undefined | value() }], directives = [] :: [any()], selection_set = [] :: [any()], alias = undefined :: undefined | graphql:name(), @@ -54,9 +56,14 @@ -record(vardef, { id :: graphql:name(), ty :: graphql_type(), - default = null :: value() + default = undefined :: undefined | value() }). +-record(var, + { id :: graphql:name(), + ty :: graphql_type(), + default = undefined :: undefined | value() }). + -record(op, { ty :: undefined | operation_type(), id = 'ROOT' :: graphql:name() | 'ROOT', @@ -91,7 +98,7 @@ -record(p_input_value, { id :: graphql:name(), description = undefined :: 'undefined' | binary(), - default = null :: any(), + default = undefined :: undefined | value(), directives = [] :: [graphql:directive()], type :: graphql_type() }). 
diff --git a/src/graphql_introspection.erl b/src/graphql_introspection.erl index 2e9382c..bfc119e 100644 --- a/src/graphql_introspection.erl +++ b/src/graphql_introspection.erl @@ -182,7 +182,11 @@ render_input_value({K, #schema_arg { ty = Ty, <<"name">> => K, <<"description">> => Desc, <<"type">> => ?LAZY(render_type(Ty)), - <<"defaultValue">> => Default + <<"defaultValue">> => + case Default of + undefined -> null; + _ -> Default + end }}. interface_implementors(ID) -> diff --git a/src/graphql_parser.yrl b/src/graphql_parser.yrl index 6e8e8c8..05d6fb4 100644 --- a/src/graphql_parser.yrl +++ b/src/graphql_parser.yrl @@ -107,7 +107,7 @@ VariableDefinitionList -> VariableDefinition : ['$1']. VariableDefinitionList -> VariableDefinition VariableDefinitionList : ['$1'|'$2']. VariableDefinition -> Variable ':' Type : - #vardef { id = '$1', ty = '$3', default = null }. + #vardef { id = '$1', ty = '$3' }. VariableDefinition -> Variable ':' Type DefaultValue : #vardef { id = '$1', ty = '$3', default = '$4' }. diff --git a/src/graphql_scalar_bool_coerce.erl b/src/graphql_scalar_bool_coerce.erl index c3f67d9..2cafc8b 100644 --- a/src/graphql_scalar_bool_coerce.erl +++ b/src/graphql_scalar_bool_coerce.erl @@ -3,7 +3,9 @@ -export([input/2, output/2]). input(_, true) -> {ok, true}; +input(_, <<"true">>) -> {ok, true}; input(_, false) -> {ok, false}; +input(_, <<"false">>) -> {ok, false}; input(_, _) -> {error, not_bool}. output(<<"Bool">>, true) -> {ok, true}; diff --git a/src/graphql_schema_canonicalize.erl b/src/graphql_schema_canonicalize.erl index 20666be..78478bb 100644 --- a/src/graphql_schema_canonicalize.erl +++ b/src/graphql_schema_canonicalize.erl @@ -113,7 +113,7 @@ c_input_value_val(#{ type := _, description := _ } = M) -> c_arg_val(M). % these functions are currently identical default(#{ default := Def }) -> Def; -default(#{ }) -> null. +default(#{ }) -> undefined. c_field(K, V) -> {binarize(K), c_field_val(V)}. 
diff --git a/test/dungeon_SUITE.erl b/test/dungeon_SUITE.erl index 2ee3500..c553015 100644 --- a/test/dungeon_SUITE.erl +++ b/test/dungeon_SUITE.erl @@ -55,51 +55,50 @@ end_per_testcase(_Case, _Config) -> groups() -> Dungeon = {dungeon, [], - [ unions - , defer - , union_errors - , error_handling - , scalar_output_coercion - , populate - , default_query - , direct_input - , fixed_input - , nested_input_object - , inline_fragment - , fragment_over_union_interface - , integer_in_float_context - , scalar_as_expression_coerce - , non_null_field - , complex_modifiers - , simple_field_merge - , nested_field_merge - , multiple_monsters_and_rooms - , include_directive - , introspection - , introspection_with_variable - , get_operation - , coercion_int_float - , replace_enum_representation - , auxiliary_data - , find_monster - , find_monster_singleton - , invalid_scalar_int_input - , introspect_default_value - ]}, + [ unions, + defer, + union_errors, + error_handling, + scalar_output_coercion, + populate, + default_query, + direct_input, + fixed_input, + nested_input_object, + inline_fragment, + fragment_over_union_interface, + integer_in_float_context, + scalar_as_expression_coerce, + non_null_field, + complex_modifiers, + simple_field_merge, + nested_field_merge, + multiple_monsters_and_rooms, + include_directive, + introspection, + introspection_with_variable, + get_operation, + coercion_int_float, + replace_enum_representation, + auxiliary_data, + find_monster, + find_monster_singleton, + invalid_scalar_int_input, + introspect_default_value, + default_parameter ]}, Errors = {errors, [], - [ unknown_variable - , null_input - , missing_fragment - , quoted_input_error - , input_coerce_error_exception - , input_coerce_error - , invalid_enums - , invalid_enum_result - , invalid_type_resolution - , duplicate_validation - , invalid_list_resolver - ]}, + [ unknown_variable, + null_input, + missing_fragment, + quoted_input_error, + input_coerce_error_exception, + 
input_coerce_error, + invalid_enums, + invalid_enum_result, + invalid_type_resolution, + duplicate_validation, + invalid_list_resolver ]}, %% Groups [ Dungeon , Errors @@ -125,11 +124,11 @@ run(Config, File, Q, Params) -> default_query(Config) -> ID = ?config(known_goblin_id_1, Config), - #{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin">>, <<"hitpoints">> := 10 }}} = + #{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin!">>, <<"hitpoints">> := 10 }}} = run(Config, <<"GoblinQuery">>, #{}), - #{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin">>, <<"stats">> := [#{ <<"attack">> := 3 }] }}} = + #{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin!">>, <<"stats">> := [#{ <<"attack">> := 3 }] }}} = run(Config, <<"MinGoblin">>, #{<<"minAttack">> => 0 }), - #{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin">>, <<"stats">> := [] }}} = + #{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin!">>, <<"stats">> := [] }}} = run(Config, <<"MinGoblin">>, #{<<"minAttack">> => 30 }), ok. 
@@ -188,7 +187,7 @@ coercion_int_float(Config) -> get_operation(Config) -> GoblinId = ?config(known_goblin_id_1, Config), - Expected = #{ data => #{<<"monster">> => #{ <<"name">> => <<"goblin">> }}}, + Expected = #{ data => #{<<"monster">> => #{ <<"name">> => <<"goblin!">> }}}, Q1 = "{ monster(id: \"" ++ binary_to_list(GoblinId) ++ "\") { name }}", Expected = th:x(Config, Q1), Q2 = "query Q { monster(id: \"" ++ binary_to_list(GoblinId) ++ "\") { name }}", @@ -211,7 +210,7 @@ include_directive(Config) -> #{ data := #{ <<"goblin">> := #{ <<"id">> := GoblinId, - <<"name">> := <<"goblin">>, + <<"name">> := <<"goblin!">>, <<"hitpoints">> := 10 }}} = run(Config, <<"GoblinQueryDirectives">>, #{ <<"fat">> => true }), @@ -226,7 +225,7 @@ include_directive(Config) -> #{ data := #{ <<"goblin">> := #{ <<"id">> := GoblinId, - <<"name">> := <<"goblin">>, + <<"name">> := <<"goblin!">>, <<"hitpoints">> := 10 }}} = run(Config, <<"GoblinQueryDirectivesInline">>, #{ <<"fat">> => true }), ok. @@ -239,21 +238,21 @@ unions(Config) -> Expected1 = #{ data => #{ <<"goblin">> => #{ <<"id">> => OpaqueId, - <<"name">> => <<"goblin">>, + <<"name">> => <<"goblin!">>, <<"hitpoints">> => 10 }}}, Expected1 = run(Config, <<"GoblinQuery">>, #{<<"id">> => OpaqueId}), ct:log("Same query, but on items"), Expected2 = #{ data => #{ <<"goblin">> => #{ <<"id">> => OpaqueId, - <<"name">> => <<"goblin">>, + <<"name">> => <<"goblin!">>, <<"hitpoints">> => 10 }}}, Expected2 = run(Config, <<"GoblinThingQuery">>, #{ <<"id">> => OpaqueId }), ct:log("Union expansions"), Expected3 = #{ data => #{ <<"things">> => [#{}]}}, Expected3 = run(Config, <<"ThingQ1">>, #{ }), - Expected4 = #{ data => #{ <<"things">> => [#{ <<"__typename">> => <<"Monster">>, <<"name">> => <<"goblin">> }]}}, + Expected4 = #{ data => #{ <<"things">> => [#{ <<"__typename">> => <<"Monster">>, <<"name">> => <<"goblin!">> }]}}, Expected4 = run(Config, <<"ThingQ2">>, #{ }), Expected5 = #{ data => #{ <<"things">> => [#{ <<"__typename">> => 
<<"Monster">> }]}}, @@ -283,7 +282,7 @@ scalar_output_coercion(Config) -> #{ data := #{ <<"goblin">> := #{ <<"id">> := OpaqueId, - <<"name">> := <<"goblin">>, + <<"name">> := <<"goblin!">>, <<"color">> := <<"#41924B">>, <<"hitpoints">> := 10 }}} = run(Config, <<"ScalarOutputCoercion">>, #{ <<"id">> => OpaqueId }), @@ -418,6 +417,69 @@ populate(Config) -> ExpectedNestedInput = run(Config, <<"IntroduceMonsterNestedVar">>, NestedInput), + ExpectedDefaultNestedInput = + #{data => + #{<<"introduceMonster">> => + #{<<"clientMutationId">> => <<"123">>, + <<"monster">> => + #{<<"color">> => <<"#444444">>, + <<"hitpoints">> => 9001, + <<"id">> => <<"bW9uc3RlcjoxMDA4">>, + <<"mood">> => <<"AGGRESSIVE">>, + <<"name">> => <<"Tiny Evil Cat">>, + <<"plushFactor">> => 57.0, + <<"properties">> => [<<"BEAST">>], + <<"stats">> => + [#{<<"attack">> => 1337, + <<"shellScripting">> => 10, + <<"yell">> => + <<"Purrrrrrrrrrrrrr!">>}]}}}}, + + ExpectedDefaultNestedInput = run(Config, <<"IntroduceMonsterDefaultNestedVar">>, #{}), + + ExpectedOptionalNestedInput = + #{data => + #{<<"introduceMonster">> => + #{<<"clientMutationId">> => <<"123">>, + <<"monster">> => + #{<<"color">> => <<"#FFFFFF">>, + <<"hitpoints">> => 1, + <<"id">> => <<"bW9uc3RlcjoxMDA5">>, + <<"mood">> => <<"DODGY">>, + <<"name">> => <<"Teeny Tiny Mouse">>, + <<"plushFactor">> => 10.0, + <<"properties">> => [<<"BEAST">>], + <<"stats">> => + [#{<<"attack">> => 1, + <<"shellScripting">> => 1, + <<"yell">> => + <<"Meek!">>}]}}}}, + + ExpectedOptionalNestedInput = run(Config, <<"IntroduceMonsterOptionalNestedVar">>, #{}), + + + ct:log("Check for proper null-handling"), + + ExpectedNullHandling = + #{data => + #{<<"introduceMonster">> => + #{<<"clientMutationId">> => <<"123">>, + <<"monster">> => + #{<<"color">> => <<"#000">>, + <<"hitpoints">> => 9002, + <<"id">> => <<"bW9uc3RlcjoxMDEw">>, + <<"mood">> => <<"DODGY">>, + <<"name">> => <<"Tiny Black Hole">>, + <<"plushFactor">> => 0.01, + <<"properties">> => [<<"BEAST">>], + 
<<"stats">> => + [#{<<"attack">> => 1, + <<"shellScripting">> => 1, + <<"yell">> => + <<"...">>}]}}}}, + + ExpectedNullHandling = run(Config, <<"IntroduceMonsterNullHandling">>, #{}), + ct:log("Check duplicate enum values (BEAST mood/property)"), DupEnumInput = #{ @@ -671,7 +733,8 @@ scalar_as_expression_coerce(Config) -> <<"attack">> := 7, <<"shellScripting">> := 5, <<"yell">> := <<"...">> }]}}}} = - run(Config, <<"IntroduceMonsterFatExpr">>, #{}), + run(Config, <<"IntroduceMonsterFatExpr">>, #{ <<"properties">> => [<<"MURLOC">>, + <<"MECH">>]}), true = (PF - 0.01) < 0.00001, ok. @@ -782,14 +845,16 @@ fragment_over_union_interface(Config) -> find_monster(Config) -> Expected1 = - lists:sort([#{<<"name">> => <<"goblin">>}, - #{<<"name">> => <<"Auxiliary Undead">>}, - #{<<"name">> => <<"goblin">>}, - #{<<"name">> => <<"goblin">>}, - #{<<"name">> => <<"hobgoblin">>}, - #{<<"name">> => <<"Yellow Slime">>}, - #{<<"name">> => <<"goblin">>}, - #{<<"name">> => <<"goblin">>}]), + lists:sort([#{<<"name">> => <<"goblin!">>}, + #{<<"name">> => <<"Teeny Tiny Mouse!">>}, + #{<<"name">> => <<"Tiny Black Hole!">>}, + #{<<"name">> => <<"Auxiliary Undead!">>}, + #{<<"name">> => <<"goblin!">>}, + #{<<"name">> => <<"goblin!">>}, + #{<<"name">> => <<"hobgoblin!">>}, + #{<<"name">> => <<"Yellow Slime!">>}, + #{<<"name">> => <<"goblin!">>}, + #{<<"name">> => <<"goblin!">>}]), #{ data := #{<<"findMonsters">> := Out1 }} = run(Config, <<"FindQuery">>, #{}), Expected1 = lists:sort(Out1), #{ data := #{<<"findMonsters">> := Out2 }} = run(Config, <<"FindQueryParam">>, #{ <<"m">> => [<<"DODGY">>]}), @@ -843,14 +908,16 @@ defer(Config) -> find_monster_singleton(Config) -> Expected1 = lists:sort( - [#{<<"name">> => <<"goblin">>}, - #{<<"name">> => <<"Auxiliary Undead">>}, - #{<<"name">> => <<"goblin">>}, - #{<<"name">> => <<"goblin">>}, - #{<<"name">> => <<"hobgoblin">>}, - #{<<"name">> => <<"Yellow Slime">>}, - #{<<"name">> => <<"goblin">>}, - #{<<"name">> => <<"goblin">>}]), + [#{<<"name">> => 
<<"goblin!">>}, + #{<<"name">> => <<"Teeny Tiny Mouse!">>}, + #{<<"name">> => <<"Tiny Black Hole!">>}, + #{<<"name">> => <<"Auxiliary Undead!">>}, + #{<<"name">> => <<"goblin!">>}, + #{<<"name">> => <<"goblin!">>}, + #{<<"name">> => <<"hobgoblin!">>}, + #{<<"name">> => <<"Yellow Slime!">>}, + #{<<"name">> => <<"goblin!">>}, + #{<<"name">> => <<"goblin!">>}]), #{ data := #{ <<"findMonsters">> := Out1 }} = run(Config, <<"FindQuerySingleton">>, #{}), Expected1 = lists:sort(Out1), #{ data := #{ <<"findMonsters">> := Out2 }} = run(Config, <<"FindQueryParamSingleton">>, #{ <<"m">> => <<"DODGY">>}), @@ -886,7 +953,7 @@ auxiliary_data(Config) -> Expected = #{ aux => [{my_auxiliary_data, true}], data => #{ <<"monster">> => #{ <<"id">> => OpaqueId - , <<"name">> => <<"Auxiliary Undead">>} + , <<"name">> => <<"Auxiliary Undead!">>} } }, Expected = run(Config, <<"TestAuxiliaryData">>, #{<<"id">> => OpaqueId}). @@ -928,9 +995,9 @@ null_input(Config) -> [<<"TestNullInput">>,<<"room">>, <<"id">>]}]} = run(Config, <<"test_null_input_2.graphql">>, <<"TestNullInput">>, #{}), %% The following bugs must fail because the value is null which is not allowed - #{ errors := [#{ extensions := #{ code := non_null }}]} = + #{ errors := [#{ extensions := #{ code := type_mismatch }}]} = run(Config, <<"test_null_input_3.graphql">>, <<"TestNullInput">>, #{}), - #{ errors := [#{ extensions := #{ code := non_null }}]} = + #{ errors := [#{ extensions := #{ code := type_mismatch }}]} = run(Config, <<"test_null_input_4.graphql">>, <<"TestNullInput">>, #{ <<"input">> => #{ <<"name">> => <<"Orc">>, @@ -971,33 +1038,27 @@ invalid_enums(Config) -> <<"mood">> => <<"AGGRESSIF">> }, #{errors := - [#{ extensions := #{ code := enum_not_found }, - message := <<"The value <<\"AGGRESSIF\">> is not a valid enum value for type Mood">>, + [#{ extensions := #{ code := unknown_enum }, path := [<<"IntroduceMonster">>,<<"input">>,<<"mood">>]}]} = run(Config, <<"IntroduceMonster">>, #{ <<"input">> => Input }), #{errors 
:= - [ #{ extensions := #{ code := enum_not_found }, - message := <<"The value <<>> is not a valid enum value for type Mood">>, + [ #{ extensions := #{ code := unknown_enum }, path := [<<"IntroduceMonster">>,<<"input">>,<<"mood">>]}]} = run(Config, <<"IntroduceMonster">>, #{ <<"input">> => Input#{ <<"mood">> => <<"">> }}), #{errors := - [#{extensions := #{ code := enum_not_found }, - message := <<"The value <<>> is not a valid enum value for type Mood">>, + [#{extensions := #{ code := unknown_enum }, path := [<<"IntroduceMonster">>,<<"input">>,<<"mood">>]}]} = run(Config, <<"IntroduceMonster">>, #{ <<"input">> => Input#{ <<"mood">> => <<>> }}), #{errors := - [#{extensions := #{ code := param_mismatch }, - message := <<"The enum value matches types [Property] but was used in a context where an enum value of type Mood was expected">>, + [#{extensions := #{ code := type_mismatch }, path := [<<"IntroduceMonster">>,<<"input">>,<<"mood">>]}]} = run(Config, <<"IntroduceMonster">>, #{ <<"input">> => Input#{ <<"mood">> => <<"DRAGON">> }}), #{ errors := - [#{ extensions := #{ code := enum_not_found }, - message := <<"The value <<\"AGGRESSIF\">> is not a valid enum value for type Mood">>, + [#{ extensions := #{ code := unknown_enum }, path := [<<"IMonster">>,<<"introduceMonster">>, <<"input">>, <<"mood">>]}]} = run(Config, "invalid_enum_1.graphql", <<"IMonster">>, #{}), #{ errors := - [#{ extensions := #{ code := enum_not_found }, - message := <<"The value <<>> is not a valid enum value for type Mood">>, + [#{ extensions := #{ code := enum_string_literal }, path := [<<"IMonster">>,<<"introduceMonster">>, <<"input">>, <<"mood">>]}]} = run(Config, "invalid_enum_2.graphql", <<"IMonster">>, #{}), ok. @@ -1031,3 +1092,17 @@ input_coerce_error_exception(Config) -> <<"color">>]}]} = run(Config, <<"IntroduceMonster">>, #{ <<"input">> => Input }), ok. 
+ +default_parameter(Config) -> + ct:log("Run, and provide a colorType explicitly"), + #{ data := + #{ <<"monster">> := + #{ <<"color">> := <<"#727272">>}}} = + run(Config, <<"GetMonster">>, #{ <<"id">> => <<"bW9uc3Rlcjox">>, + <<"colorType">> => <<"gray">> }), + ct:log("Run, and pick up the color type via the argument default"), + #{ data := + #{ <<"monster">> := + #{ <<"color">> := <<"#41924B">>}}} = + run(Config, <<"GetMonster">>, #{ <<"id">> => <<"bW9uc3Rlcjox">> }), + ok. diff --git a/test/dungeon_SUITE_data/dungeon_schema.graphql b/test/dungeon_SUITE_data/dungeon_schema.graphql index 53a745b..f9c1696 100644 --- a/test/dungeon_SUITE_data/dungeon_schema.graphql +++ b/test/dungeon_SUITE_data/dungeon_schema.graphql @@ -103,7 +103,7 @@ input IntroduceMonsterInput { name : String! color : Color! hitpoints : Int = 15 - plushFactor : Float = 0.01 + plushFactor : Float mood : Mood = DODGY properties : [Property] = [] stats : [StatsInput] diff --git a/test/dungeon_SUITE_data/query.graphql b/test/dungeon_SUITE_data/query.graphql index a1f6d13..ffab2c1 100644 --- a/test/dungeon_SUITE_data/query.graphql +++ b/test/dungeon_SUITE_data/query.graphql @@ -130,6 +130,14 @@ query MultipleMonsters($ids : [Id!]) { } } +query GetMonster($id : ID!, $colorType : ColorType) { + monster(id: $id) { + id + name + color(colorType : $colorType) + } +} + query MultipleMonstersExpr { monsters(ids: ["bW9uc3Rlcjox", "bW9uc3Rlcjoy"]) { id @@ -352,14 +360,14 @@ mutation IntroduceMonsterFatFixedInput { } } -mutation IntroduceMonsterFatExpr { +mutation IntroduceMonsterFatExpr($properties : [Property]) { introduceMonster(input: { clientMutationId: "123", name: "Green Slime", color: "#1be215", hitpoints: 9001, mood: TRANQUIL, - properties: [MURLOC, MECH], + properties: $properties, stats: [{ attack: 7, shellScripting: 5, @@ -379,7 +387,7 @@ mutation IntroduceMonsterFatExpr { } } -mutation IntroduceMonsterNestedVar($mood : Mood!) 
{ +mutation IntroduceMonsterNestedVar($mood : Mood = AGGRESSIVE) { introduceMonster(input: { clientMutationId: "123", name: "Giant Spider", @@ -406,6 +414,89 @@ mutation IntroduceMonsterNestedVar($mood : Mood!) { } } +mutation IntroduceMonsterDefaultNestedVar($mood : Mood = AGGRESSIVE) { + introduceMonster(input: + { clientMutationId: "123", + name: "Tiny Evil Cat", + color: "#444444", # Cats are very grey! + hitpoints: 9001, + mood: $mood, + properties: [BEAST], + plushFactor: 57.0, + stats: [{ + attack: 1337, + shellScripting: 10, + yell: "Purrrrrrrrrrrrrr!" + }] + }) { + clientMutationId, + monster { + ...FatMonsterFragment + plushFactor + stats { + attack + shellScripting + yell + } + } + } +} + +mutation IntroduceMonsterOptionalNestedVar($mood : Mood) { + introduceMonster(input: + { clientMutationId: "123", + name: "Teeny Tiny Mouse", + color: "#ffffff", # Makes your eyes bleed + hitpoints: 1, + mood: $mood, + properties: [BEAST], + plushFactor: 10.0, + stats: [{ + attack: 1, + shellScripting: 1, + yell: "Meek!" + }] + }) { + clientMutationId, + monster { + ...FatMonsterFragment + plushFactor + stats { + attack + shellScripting + yell + } + } + } +} + +mutation IntroduceMonsterNullHandling($pf : Float) { + introduceMonster(input: + { clientMutationId: "123", + name: "Tiny Black Hole", + color: "#000000", # Makes your eyes bleed + hitpoints: 9002, + properties: [BEAST], + plushFactor: $pf, + stats: [{ + attack: 1, + shellScripting: 1, + yell: "..." + }] + }) { + clientMutationId, + monster { + ...FatMonsterFragment + plushFactor + stats { + attack + shellScripting + yell + } + } + } +} + mutation IntroduceRoom($input: IntroduceRoomInput!) 
{ introduceRoom(input: $input) { clientMutationId diff --git a/test/dungeon_monster.erl b/test/dungeon_monster.erl index 2955b71..f29ecb0 100644 --- a/test/dungeon_monster.erl +++ b/test/dungeon_monster.erl @@ -20,19 +20,34 @@ execute(Ctx, #monster { id = ID, case Field of <<"id">> -> graphql:throw(dungeon:wrap({monster, ID})); <<"name">> -> -            ct:pal("Name Context Directives: ~p", [maps:get(field_directives, Ctx)]), +            ct:log("Name Context Directives: ~p", [maps:get(field_directives, Ctx)]), NameToken = graphql:token(Ctx), spawn_link(fun() -> graphql:reply_cast(NameToken, {ok, Name}) end), -            {defer, NameToken}; +            graphql:map(fun({ok, N}) -> +                                {ok, <<N/binary, "!">>} +                        end, {defer, NameToken}); <<"color">> -> color(Color, Args); <<"hitpoints">> -> HPToken = graphql:token(Ctx), +            HPToken2 = graphql:token(Ctx), spawn_link(fun() -> graphql:reply_cast(HPToken, {ok, HP}) end), -            {defer, HPToken}; +            D = {defer, HPToken}, +            X = graphql:map(fun({ok, HitPoints}) -> +                                    V = {ok, term_to_binary(HitPoints)}, +                                    spawn_link(fun() -> +                                                       graphql:reply_cast(HPToken2, V) +                                               end), +                                    {defer, HPToken2} +                            end, +                            D), +            graphql:map(fun({ok, Packed}) -> +                                {ok, binary_to_term(Packed)} +                        end, +                        X); <<"hp">> -> {ok, HP}; <<"inventory">> -> Data = [dungeon:load(OID) || OID <- Inventory], diff --git a/test/dungeon_mutation.erl b/test/dungeon_mutation.erl index c185cee..6712f9d 100644 --- a/test/dungeon_mutation.erl +++ b/test/dungeon_mutation.erl @@ -26,9 +26,13 @@ execute(_Ctx, _, <<"introduceMonster">>, #{ <<"input">> := Input }) -> false -> exit({bad_mood_value, M}) end, +    PlushFactor = case PF of +                      null -> 0.01; +                      PlushValue when is_float(PlushValue) -> PlushValue +                  end, {atomic, Monster} = dungeon:insert(#monster { properties = Props, -        plush_factor = PF, +        plush_factor = PlushFactor, stats = Ss, name = N, color = C, diff --git a/test/dungeon_scalar.erl b/test/dungeon_scalar.erl index 3718b7d..3158259 100644 --- a/test/dungeon_scalar.erl +++ b/test/dungeon_scalar.erl @@ -20,8 +20,8 @@ input(<<"Color">>, X) -> 
output(<<"ColorType">>, X) -> {ok, X}; output(<<"Color">>, #{ r := R, g := G, b := B}) -> - R1 = integer_to_binary(R, 16), - G1 = integer_to_binary(G, 16), - B1 = integer_to_binary(B, 16), + R1 = integer_to_binary(round(R), 16), + G1 = integer_to_binary(round(G), 16), + B1 = integer_to_binary(round(B), 16), {ok, <<"#", R1/binary, G1/binary, B1/binary>>}.