Feature/map #203

Open · wants to merge 6 commits into base: develop
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -16,6 +16,13 @@ the compatibility issues you are likely to encounter.
OTP 21 version, and if present, use the new stack-trace form. This
ensures backwards compatibility as well as proper stack trace
handling in new OTP releases.
* New command `graphql:map/2`. Given a `Result` of the form `{ok, Val} |
{defer, Token}`, the call `graphql:map(F, Result)` applies `F` to the
result, either immediately or, in the case of a defer, once the defer
completes. This provides an alternative way to handle results that
cannot be completed right away (see the usage sketch below). Long-running
work is usually better handled in a spawned process, but simpler
transformations can be performed within the context of the GraphQL process.
* New command `graphql:sync/3`. Calling `graphql:sync(Ctx, Pid, Msg)`
will place a message into the GraphQL mailbox. When this message
occurs, we will send `Pid` a message `Msg`. This is useful for e.g.,
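For illustration, a minimal sketch of how the two calls described above might be used. The object shape, the `<<"name">>` field, and the `graphql_drained` message are assumptions made for this example, not part of the PR:

```erlang
%% graphql:map/2: defer the field to a worker, then post-process the
%% reply (append "!") inside the GraphQL process once it arrives.
execute(Ctx, #{ name := Name }, <<"name">>, _Args) ->
    Token = graphql:token(Ctx),
    spawn_link(fun() -> graphql:reply_cast(Token, {ok, Name}) end),
    graphql:map(fun ({ok, N})         -> {ok, <<N/binary, "!">>};
                    ({error, _} = Err) -> Err
                end,
                {defer, Token}).

%% graphql:sync/3: ask the GraphQL process to send us `graphql_drained`
%% once it reaches this message in its mailbox.
await_drained(Ctx) ->
    graphql:sync(Ctx, self(), graphql_drained),
    receive graphql_drained -> ok end.
```

The fun passed to `graphql:map/2` runs inside the GraphQL process, so it should stay cheap; anything heavier belongs in the spawned worker.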
2 changes: 1 addition & 1 deletion README.md
@@ -28,7 +28,7 @@ specification, except for a few areas:
In addition, we are working towards June 2018 compliance. We already
implemented many of the changes in the system. But we are still
missing some parts. The implementation plan is on a demand driven
basis for Shopgun currently, in that we tend to implement things are
basis for Shopgun currently, in that we tend to implement things when
there is a need for them.

# Documentation
23 changes: 21 additions & 2 deletions src/graphql.erl
@@ -1,10 +1,11 @@
-module(graphql).


-include_lib("graphql/include/graphql.hrl").
-include("graphql_internal.hrl").
-include("graphql_schema.hrl").

-compile({no_auto_import, [monitor/2]}).

%% GraphQL Documents
-export([
parse/1,
@@ -26,7 +27,9 @@
%% Deferred execution
-export([
token/1, reply_cast/2,
sync/3
sync/3,
monitor/2,
map/2
]).

%% Schema Definitions
@@ -47,6 +50,10 @@
-export_type([json/0, param_context/0]).

-type token() :: {'$graphql_token', pid(), reference(), reference()}.
-type defer_map() :: #{ worker => pid(),
timeout => non_neg_integer(),
apply => [fun()]}.
-type result() :: {ok, term()} | {error, term()} | {defer, token()} | {defer, token(), defer_map()}.
-type name() :: {name, pos_integer(), binary()} | binary().
-type document() :: #document{}.
-type directive() :: #directive{}.
@@ -74,6 +81,18 @@ token(#{ defer_process := Proc, defer_request_id := ReqId }) ->
sync(#{ defer_process := Proc, defer_request_id := ReqId }, Pid, Msg) ->
Proc ! {'$graphql_sync', ReqId, Pid, Msg}.

-spec monitor(pid(), result()) -> result().
monitor(_Worker, {ok, Value}) -> {ok, Value};
monitor(_Worker, {error, Reason}) -> {error, Reason};
monitor(Worker, {defer, Token}) -> monitor(Worker, {defer, Token, #{}});
monitor(Worker, {defer, Token, Map}) when is_pid(Worker) -> {defer, Token, Map#{ worker => Worker}}.

map(F, {ok, Value}) -> F({ok, Value});
map(F, {error, Reason}) -> F({error, Reason});
map(F, {defer, Token}) -> map(F, {defer, Token, #{}});
map(F, {defer, Token, #{ apply := App} = M}) -> {defer, Token, M#{ apply := queue:in(F, App)}};
map(F, {defer, Token, #{} = M}) -> {defer, Token, M#{ apply => queue:in(F, queue:new())}}.

%% @private
token_ref({'$graphql_token', _, _, Ref}) -> Ref.

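The graphql.erl changes also add `graphql:monitor/2`, which has no changelog entry in the excerpt above. A sketch of how a resolver might use it, assuming a hypothetical `fetch_inventory/1` helper; judging from `defer_map()` and the worker handling in graphql_execute.erl, the attached pid is what the execution engine monitors while the field is deferred:

```erlang
%% Illustrative resolver clause (not part of this PR): hand the slow
%% lookup to a worker and tag the defer with that worker's pid.
execute(Ctx, Obj, <<"inventory">>, _Args) ->
    Token = graphql:token(Ctx),
    Worker = spawn_link(fun() ->
                 %% fetch_inventory/1 is a hypothetical helper.
                 graphql:reply_cast(Token, fetch_inventory(Obj))
             end),
    %% monitor/2 turns {defer, Token} into {defer, Token, #{ worker => Worker }},
    %% letting the engine watch the worker for the lifetime of the defer.
    graphql:monitor(Worker, {defer, Token}).
```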
68 changes: 48 additions & 20 deletions src/graphql_execute.erl
@@ -397,7 +397,7 @@ execute_field(#ectx{ op_type = OpType,
execute_field_await(Ctx, ElaboratedTy, Fields, Ref);
{defer, Token, undefined} ->
Monitor = undefined,
field_closure(Ctx, ElaboratedTy, Fields, Token, Monitor, DT);
field_closure(Ctx, ElaboratedTy, Fields, Token, Monitor, DT, queue:new());
{defer, Token, DeferStateMap} when is_map(DeferStateMap) ->
defer_field_closure(Ctx, ElaboratedTy, Fields, Token, DeferStateMap);
ResolvedValue ->
@@ -418,11 +418,17 @@ defer_field_closure(#ectx{ defer_target = _Upstream,
ElaboratedTy, Fields, Token, DeferStateMap) ->
TimeOut = maps:get(timeout, DeferStateMap, DT),
Worker = maps:get(worker, DeferStateMap, undefined),
ApplyChain = maps:get(apply, DeferStateMap, queue:new()),
Monitor = build_monitor(Worker),
field_closure(Ctx, ElaboratedTy, Fields, Token, Monitor, TimeOut).
field_closure(Ctx, ElaboratedTy, Fields, Token, Monitor, TimeOut, ApplyChain).

field_closure(#ectx{ defer_target = Upstream } = Ctx,
ElaboratedTy, Fields, Token, Monitor, TimeOut) ->
ElaboratedTy,
Fields,
Token,
Monitor,
TimeOut,
ApplyChain) ->
Ref = graphql:token_ref(Token),
Closure =
fun
@@ -443,24 +449,31 @@ field_closure(#ectx{ defer_target = Upstream } = Ctx,
};
(ResolverResult) ->
remove_monitor(Monitor),
ResVal = handle_resolver_result(ResolverResult),
case complete_value(Ctx, ElaboratedTy, Fields, ResVal) of
{ok, Result, Errs} ->
#done { upstream = Upstream,
key = Ref,
cancel = [],
demonitor = Monitor,
result = {ok, Result, Errs} };
{error, Errs} ->
#done { upstream = Upstream,
key = Ref,
cancel = [],
demonitor = Monitor,
result = {error, Errs} };
#work { items = Items, demonitors = Ms } = Wrk ->
case apply_chain(ResolverResult, queue:to_list(ApplyChain)) of
{go, AppliedResult} ->
ResVal = handle_resolver_result(AppliedResult),
case complete_value(Ctx, ElaboratedTy, Fields, ResVal) of
{ok, Result, Errs} ->
#done { upstream = Upstream,
key = Ref,
cancel = [],
demonitor = Monitor,
result = {ok, Result, Errs} };
{error, Errs} ->
#done { upstream = Upstream,
key = Ref,
cancel = [],
demonitor = Monitor,
result = {error, Errs} };
#work { items = Items, demonitors = Ms } = Wrk ->
NewRef = upstream_ref(Items),
Wrk#work { change_ref = {Upstream, Ref, NewRef},
demonitors = [Monitor] ++ Ms}
end;
{defer, NewToken, DeferState} ->
#work { items = Items } = Wrk = defer_field_closure(Ctx, ElaboratedTy, Fields, NewToken, DeferState),
NewRef = upstream_ref(Items),
Wrk#work { change_ref = {Upstream, Ref, NewRef},
demonitors = [Monitor] ++ Ms}
Wrk#work { change_ref = {Upstream, Ref, NewRef}}
end
end,
#work { items = [{Ref, Closure}],
@@ -471,6 +484,21 @@ field_closure(#ectx{ defer_target = Upstream } = Ctx,
{M, _} -> #{ M => Ref }
end }.

apply_chain(Val, []) ->
{go, Val};
apply_chain(Val, [F|Fs]) ->
case F(Val) of
{ok, _Val} = Ok -> apply_chain(Ok, Fs);
{error, _Reason} = Error -> apply_chain(Error, Fs);
{defer, Token} ->
{defer, Token, #{ apply => queue:from_list(Fs) }};
{defer, Token, #{ apply := ToApply} = M} ->
%% Insert the rest of the chain at the front in reverse
%% order, so the item deepest in the list goes in first.
NewQueue = lists:foldr(fun queue:in_r/2, ToApply, Fs),
{defer, Token, M#{ apply := NewQueue }}
end.

report_wrong_return(Obj, Name, Fun, Val) ->
error_logger:error_msg(
"Resolver ~p.~p returned wrong value: ~p(..) -> ~p",
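To make the apply-chain plumbing above concrete, a small sketch at the level of the public `graphql:map/2` API; the token and the arithmetic are only for illustration:

```erlang
%% Build a deferred result carrying two chained transformations.
chained(Token) ->
    R0 = {defer, Token},
    R1 = graphql:map(fun ({ok, V}) -> {ok, V + 1}; (E) -> E end, R0),
    graphql:map(fun ({ok, V}) -> {ok, V * 2}; (E) -> E end, R1).
%% chained(Token) returns {defer, Token, #{ apply := Q }} where the +1 fun
%% sits at the front of Q and the *2 fun behind it. apply_chain/2 walks
%% queue:to_list(Q) front to back, so a worker reply of {ok, 3} is first
%% mapped to {ok, 4} and then to {ok, 8}.
```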
54 changes: 27 additions & 27 deletions test/dungeon_SUITE.erl
@@ -125,11 +125,11 @@ run(Config, File, Q, Params) ->

default_query(Config) ->
ID = ?config(known_goblin_id_1, Config),
#{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin">>, <<"hitpoints">> := 10 }}} =
#{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin!">>, <<"hitpoints">> := 10 }}} =
run(Config, <<"GoblinQuery">>, #{}),
#{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin">>, <<"stats">> := [#{ <<"attack">> := 3 }] }}} =
#{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin!">>, <<"stats">> := [#{ <<"attack">> := 3 }] }}} =
run(Config, <<"MinGoblin">>, #{<<"minAttack">> => 0 }),
#{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin">>, <<"stats">> := [] }}} =
#{ data := #{ <<"goblin">> := #{ <<"id">> := ID, <<"name">> := <<"goblin!">>, <<"stats">> := [] }}} =
run(Config, <<"MinGoblin">>, #{<<"minAttack">> => 30 }),
ok.

@@ -188,7 +188,7 @@ coercion_int_float(Config) ->

get_operation(Config) ->
GoblinId = ?config(known_goblin_id_1, Config),
Expected = #{ data => #{<<"monster">> => #{ <<"name">> => <<"goblin">> }}},
Expected = #{ data => #{<<"monster">> => #{ <<"name">> => <<"goblin!">> }}},
Q1 = "{ monster(id: \"" ++ binary_to_list(GoblinId) ++ "\") { name }}",
Expected = th:x(Config, Q1),
Q2 = "query Q { monster(id: \"" ++ binary_to_list(GoblinId) ++ "\") { name }}",
@@ -211,7 +211,7 @@ include_directive(Config) ->
#{ data := #{
<<"goblin">> := #{
<<"id">> := GoblinId,
<<"name">> := <<"goblin">>,
<<"name">> := <<"goblin!">>,
<<"hitpoints">> := 10 }}} =
run(Config, <<"GoblinQueryDirectives">>, #{ <<"fat">> => true }),

@@ -226,7 +226,7 @@ include_directive(Config) ->
#{ data := #{
<<"goblin">> := #{
<<"id">> := GoblinId,
<<"name">> := <<"goblin">>,
<<"name">> := <<"goblin!">>,
<<"hitpoints">> := 10 }}} =
run(Config, <<"GoblinQueryDirectivesInline">>, #{ <<"fat">> => true }),
ok.
@@ -239,21 +239,21 @@ unions(Config) ->
Expected1 = #{ data => #{
<<"goblin">> => #{
<<"id">> => OpaqueId,
<<"name">> => <<"goblin">>,
<<"name">> => <<"goblin!">>,
<<"hitpoints">> => 10 }}},
Expected1 = run(Config, <<"GoblinQuery">>, #{<<"id">> => OpaqueId}),
ct:log("Same query, but on items"),
Expected2 = #{ data => #{
<<"goblin">> => #{
<<"id">> => OpaqueId,
<<"name">> => <<"goblin">>,
<<"name">> => <<"goblin!">>,
<<"hitpoints">> => 10 }}},
Expected2 = run(Config, <<"GoblinThingQuery">>, #{ <<"id">> => OpaqueId }),
ct:log("Union expansions"),
Expected3 = #{ data => #{ <<"things">> => [#{}]}},
Expected3 = run(Config, <<"ThingQ1">>, #{ }),

Expected4 = #{ data => #{ <<"things">> => [#{ <<"__typename">> => <<"Monster">>, <<"name">> => <<"goblin">> }]}},
Expected4 = #{ data => #{ <<"things">> => [#{ <<"__typename">> => <<"Monster">>, <<"name">> => <<"goblin!">> }]}},
Expected4 = run(Config, <<"ThingQ2">>, #{ }),

Expected5 = #{ data => #{ <<"things">> => [#{ <<"__typename">> => <<"Monster">> }]}},
@@ -283,7 +283,7 @@ scalar_output_coercion(Config) ->
#{ data := #{
<<"goblin">> := #{
<<"id">> := OpaqueId,
<<"name">> := <<"goblin">>,
<<"name">> := <<"goblin!">>,
<<"color">> := <<"#41924B">>,
<<"hitpoints">> := 10 }}} =
run(Config, <<"ScalarOutputCoercion">>, #{ <<"id">> => OpaqueId }),
@@ -782,14 +782,14 @@ fragment_over_union_interface(Config) ->

find_monster(Config) ->
Expected1 =
lists:sort([#{<<"name">> => <<"goblin">>},
#{<<"name">> => <<"Auxiliary Undead">>},
#{<<"name">> => <<"goblin">>},
#{<<"name">> => <<"goblin">>},
#{<<"name">> => <<"hobgoblin">>},
#{<<"name">> => <<"Yellow Slime">>},
#{<<"name">> => <<"goblin">>},
#{<<"name">> => <<"goblin">>}]),
lists:sort([#{<<"name">> => <<"goblin!">>},
#{<<"name">> => <<"Auxiliary Undead!">>},
#{<<"name">> => <<"goblin!">>},
#{<<"name">> => <<"goblin!">>},
#{<<"name">> => <<"hobgoblin!">>},
#{<<"name">> => <<"Yellow Slime!">>},
#{<<"name">> => <<"goblin!">>},
#{<<"name">> => <<"goblin!">>}]),
#{ data := #{<<"findMonsters">> := Out1 }} = run(Config, <<"FindQuery">>, #{}),
Expected1 = lists:sort(Out1),
#{ data := #{<<"findMonsters">> := Out2 }} = run(Config, <<"FindQueryParam">>, #{ <<"m">> => [<<"DODGY">>]}),
@@ -843,14 +843,14 @@ defer(Config) ->
find_monster_singleton(Config) ->
Expected1 =
lists:sort(
[#{<<"name">> => <<"goblin">>},
#{<<"name">> => <<"Auxiliary Undead">>},
#{<<"name">> => <<"goblin">>},
#{<<"name">> => <<"goblin">>},
#{<<"name">> => <<"hobgoblin">>},
#{<<"name">> => <<"Yellow Slime">>},
#{<<"name">> => <<"goblin">>},
#{<<"name">> => <<"goblin">>}]),
[#{<<"name">> => <<"goblin!">>},
#{<<"name">> => <<"Auxiliary Undead!">>},
#{<<"name">> => <<"goblin!">>},
#{<<"name">> => <<"goblin!">>},
#{<<"name">> => <<"hobgoblin!">>},
#{<<"name">> => <<"Yellow Slime!">>},
#{<<"name">> => <<"goblin!">>},
#{<<"name">> => <<"goblin!">>}]),
#{ data := #{ <<"findMonsters">> := Out1 }} = run(Config, <<"FindQuerySingleton">>, #{}),
Expected1 = lists:sort(Out1),
#{ data := #{ <<"findMonsters">> := Out2 }} = run(Config, <<"FindQueryParamSingleton">>, #{ <<"m">> => <<"DODGY">>}),
@@ -886,7 +886,7 @@ auxiliary_data(Config) ->
Expected = #{
aux => [{my_auxiliary_data, true}],
data => #{ <<"monster">> => #{ <<"id">> => OpaqueId
, <<"name">> => <<"Auxiliary Undead">>}
, <<"name">> => <<"Auxiliary Undead!">>}
}
},
Expected = run(Config, <<"TestAuxiliaryData">>, #{<<"id">> => OpaqueId}).
21 changes: 18 additions & 3 deletions test/dungeon_monster.erl
@@ -20,19 +20,34 @@ execute(Ctx, #monster { id = ID,
case Field of
<<"id">> -> graphql:throw(dungeon:wrap({monster, ID}));
<<"name">> ->
ct:pal("Name Context Directives: ~p", [maps:get(field_directives, Ctx)]),
ct:log("Name Context Directives: ~p", [maps:get(field_directives, Ctx)]),
NameToken = graphql:token(Ctx),
spawn_link(fun() ->
graphql:reply_cast(NameToken, {ok, Name})
end),
{defer, NameToken};
graphql:map(fun({ok, N}) ->
{ok, <<N/binary, "!">>}
end, {defer, NameToken});
<<"color">> -> color(Color, Args);
<<"hitpoints">> ->
HPToken = graphql:token(Ctx),
HPToken2 = graphql:token(Ctx),
spawn_link(fun() ->
graphql:reply_cast(HPToken, {ok, HP})
end),
{defer, HPToken};
D = {defer, HPToken},
X = graphql:map(fun({ok, HitPoints}) ->
V = {ok, term_to_binary(HitPoints)},
spawn_link(fun() ->
graphql:reply_cast(HPToken2, V)
end),
{defer, HPToken2}
end,
D),
graphql:map(fun({ok, Packed}) ->
{ok, binary_to_term(Packed)}
end,
X);
<<"hp">> -> {ok, HP};
<<"inventory">> ->
Data = [dungeon:load(OID) || OID <- Inventory],
Expand Down