Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
95 changes: 95 additions & 0 deletions lib/absinthe/plug/request.ex
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,38 @@ defmodule Absinthe.Plug.Request do
#

@spec extract_body_and_params(Plug.Conn.t(), map()) :: {:ok, Plug.Conn.t(), String.t(), map()}

# Handle standard graphql-multipart-request-spec format.
# See: https://github.com/jaydenseric/graphql-multipart-request-spec
#
# This is the format used by Apollo Client, urql, Relay, and most GraphQL
# clients for file uploads. The request contains three parts:
# - `operations`: JSON with the query and variables (file slots are null)
# - `map`: JSON mapping form field names to variable paths
# - numbered fields (0, 1, ...): the actual files
#
# This clause transforms the standard format into Absinthe's native format
# by replacing null variable slots with string references to the form field
# names, so the existing :upload scalar resolver can pick them up unchanged.
# Handle standard graphql-multipart-request-spec format.
# See: https://github.com/jaydenseric/graphql-multipart-request-spec
#
# This is the format used by Apollo Client, urql, Relay, and most GraphQL
# clients for file uploads. The request contains three parts:
# - `operations`: JSON with the query and variables (file slots are null)
# - `map`: JSON mapping form field names to variable paths
# - numbered fields (0, 1, ...): the actual files
#
# This clause transforms the standard format into Absinthe's native format
# by replacing null variable slots with string references to the form field
# names, so the existing :upload scalar resolver can pick them up unchanged.
#
# The `is_binary/1` guards ensure this clause only claims requests whose
# `operations` and `map` parts arrived as raw JSON strings; any other shape
# (e.g. a file upload that happens to be named "map") falls through to the
# remaining clauses instead of raising inside the JSON codec.
defp extract_body_and_params(
       %{body_params: %{"operations" => operations, "map" => map_json}} = conn,
       config
     )
     when is_binary(operations) and is_binary(map_json) do
  conn = fetch_query_params(conn)

  with {:ok, ops} <- config.json_codec.module.decode(operations),
       {:ok, file_map} <- config.json_codec.module.decode(map_json) do
    # Per the spec, a JSON array in `operations` signals a batched request:
    # one shared file map whose paths are prefixed with the operation index.
    if is_list(ops) do
      extract_body_and_params_standard_batch(conn, ops, file_map)
    else
      extract_body_and_params_standard_single(conn, ops, file_map)
    end
  else
    {:error, _} ->
      {:input_error, "Could not parse multipart operations or map as JSON"}
  end
end

defp extract_body_and_params(%{body_params: %{"query" => _}} = conn, _config) do
conn = fetch_query_params(conn)
{:ok, conn, "", conn.params}
Expand All @@ -122,6 +154,69 @@ defmodule Absinthe.Plug.Request do
end
end

# Builds Absinthe-native request params for a single (non-batched)
# operation: the decoded `operations` document supplies query, variables
# and operationName, with file-slot nulls in the variables replaced by
# form-field references via `apply_file_map/2`.
defp extract_body_and_params_standard_single(conn, ops, file_map) do
  resolved_variables = apply_file_map(ops["variables"] || %{}, file_map)

  params =
    Map.merge(conn.params, %{
      "query" => ops["query"],
      "variables" => resolved_variables,
      "operationName" => ops["operationName"]
    })

  {:ok, conn, "", params}
end

# Builds Absinthe-native batched params ("_json" list) from a decoded
# `operations` array. The shared file map uses index-prefixed paths
# ("0.variables.x"), so for each operation we keep only the entries that
# target its index and strip the prefix before applying them.
defp extract_body_and_params_standard_batch(conn, ops_list, file_map) do
  json_list =
    for {ops, idx} <- Enum.with_index(ops_list) do
      prefix = "#{idx}."

      scoped_file_map =
        for {field, paths} <- file_map,
            Enum.any?(paths, &String.starts_with?(&1, prefix)),
            into: %{} do
          {field, Enum.map(paths, &String.replace_prefix(&1, prefix, ""))}
        end

      %{
        "query" => ops["query"],
        "variables" => apply_file_map(ops["variables"] || %{}, scoped_file_map),
        "operationName" => ops["operationName"]
      }
    end

  {:ok, conn, "", Map.put(conn.params, "_json", json_list)}
end

# Replaces the null file slots in `variables` with the form-field names
# that carry the uploads, following the paths declared in the multipart
# `map` part (e.g. %{"0" => ["variables.fileA"]}).
defp apply_file_map(variables, file_map) do
  file_map
  |> Enum.flat_map(fn {field_name, paths} ->
    Enum.map(paths, &{field_name, &1})
  end)
  |> Enum.reduce(variables, fn {field_name, path}, acc ->
    segments =
      path
      |> String.replace_prefix("variables.", "")
      |> String.split(".")

    deep_put(acc, segments, field_name)
  end)
end

# Writes `value` at the nested position described by `segments` inside a
# mixed map/list structure (decoded JSON). Map segments are keys; list
# segments are stringified integer indexes. Missing intermediate map keys
# are created as maps; out-of-range list indexes are silently ignored
# (List.replace_at/update_at semantics).

# Terminal step on a map: bind the value at `key`.
defp deep_put(map, [key], value) when is_map(map) do
  Map.put(map, key, value)
end

# Terminal step on a list: the segment is a stringified index.
defp deep_put(list, [index], value) when is_list(list) do
  position = String.to_integer(index)
  List.replace_at(list, position, value)
end

# Recurse into a map, creating an empty intermediate map when absent.
defp deep_put(map, [key | rest], value) when is_map(map) do
  child = Map.get(map, key, %{})
  Map.put(map, key, deep_put(child, rest, value))
end

# Recurse into a list element addressed by a stringified index.
defp deep_put(list, [index | rest], value) when is_list(list) do
  position = String.to_integer(index)
  List.update_at(list, position, fn child -> deep_put(child, rest, value) end)
end

defp convert_operations_param(conn = %{params: %{"operations" => operations}})
when is_binary(operations) do
put_in(conn.params["_json"], conn.params["operations"])
Expand Down
101 changes: 101 additions & 0 deletions test/lib/absinthe/plug_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -423,6 +423,107 @@
end
end

# Exercises the cross-client multipart format described at
# https://github.com/jaydenseric/graphql-multipart-request-spec:
# `operations` carries the JSON document, `map` links numbered form
# fields to variable paths, and the numbered fields carry the files.
describe "file uploads via standard graphql-multipart-request-spec" do
  setup [:basic_opts]

  # Document with one required Upload variable; the file itself travels
  # in a separate multipart field referenced through `map`.
  @upload_mutation "query ($fileA: Upload!) { uploadTest(fileA: $fileA) }"

  test "single file upload", %{opts: opts} do
    upload = %Plug.Upload{}

    # The file slot is null in `variables`; the `map` entry points the
    # "0" form field at it.
    operations =
      Jason.encode!(%{query: @upload_mutation, variables: %{fileA: nil}})

    map = Jason.encode!(%{"0" => ["variables.fileA"]})

    assert %{status: 200, resp_body: resp_body} =
             conn(:post, "/", %{
               "operations" => operations,
               "map" => map,
               "0" => upload
             })
             |> put_req_header("content-type", "multipart/form-data")
             |> call(opts)

    assert resp_body == %{"data" => %{"uploadTest" => "file_a"}}
  end

  test "multiple file uploads", %{opts: opts} do
    query = "query ($a: Upload!, $b: Upload) { uploadTest(fileA: $a, fileB: $b) }"
    upload = %Plug.Upload{}

    operations =
      Jason.encode!(%{query: query, variables: %{a: nil, b: nil}})

    # Two numbered form fields, each mapped to its own variable path.
    map = Jason.encode!(%{"0" => ["variables.a"], "1" => ["variables.b"]})

    assert %{status: 200, resp_body: resp_body} =
             conn(:post, "/", %{
               "operations" => operations,
               "map" => map,
               "0" => upload,
               "1" => upload
             })
             |> put_req_header("content-type", "multipart/form-data")
             |> call(opts)

    assert resp_body == %{"data" => %{"uploadTest" => "file_a, file_b"}}
  end

  test "upload with additional variables", %{opts: opts} do
    query = "query ($a: Upload!, $auth: String) { uploadTest(fileA: $a, auth: $auth) }"
    upload = %Plug.Upload{}

    # Non-file variables ("auth") stay in `variables` as-is; only the
    # null file slot is rewritten from the `map` entry.
    operations =
      Jason.encode!(%{query: query, variables: %{a: nil, auth: "foo"}})

    map = Jason.encode!(%{"0" => ["variables.a"]})

    assert %{status: 200, resp_body: resp_body} =
             conn(:post, "/", %{
               "operations" => operations,
               "map" => map,
               "0" => upload
             })
             |> put_req_header("content-type", "multipart/form-data")
             |> call(opts)

    assert resp_body == %{"data" => %{"uploadTest" => "auth, file_a"}}
  end

  test "returns error with invalid operations JSON", %{opts: opts} do
    # Undecodable `operations` must produce a 400 input error, not a crash.
    assert %{status: 400, resp_body: resp_body} =
             conn(:post, "/", %{
               "operations" => "not json",
               "map" => "{}"
             })
             |> put_req_header("content-type", "multipart/form-data")
             |> call(opts)

    assert %{"errors" => [%{"message" => msg}]} = resp_body
    assert msg =~ "Could not parse multipart"
  end

  test "returns error when required upload is missing from map", %{opts: opts} do
    operations =
      Jason.encode!(%{query: @upload_mutation, variables: %{fileA: nil}})

    # empty map - no file mapped to the variable
    map = Jason.encode!(%{})

    # The document still parses, so the transport answers 200 with a
    # GraphQL-level error (required Upload variable left null).
    assert %{status: 200, resp_body: resp_body} =
             conn(:post, "/", %{
               "operations" => operations,
               "map" => map
             })
             |> put_req_header("content-type", "multipart/form-data")
             |> call(opts)

    assert %{"errors" => errors} = resp_body
    assert length(errors) > 0
  end
end

test "it works with basic documents and complexity limits" do
opts = Absinthe.Plug.init(schema: TestSchema, max_complexity: 100, analyze_complexity: true)

Expand All @@ -449,7 +550,7 @@
assert expected == resp_body
end

test "Subscriptions over HTTP with Server Sent Events chunked response (non standard)" do

Check failure on line 553 in test/lib/absinthe/plug_test.exs

View workflow job for this annotation

GitHub Actions / test (1.14, 25)

test Subscriptions over HTTP with Server Sent Events chunked response (non standard) (Absinthe.PlugTest)
TestPubSub.start_link()
Absinthe.Subscription.start_link(TestPubSub)

Expand Down
Loading