Skip to content

Commit f6e2ac4

Browse files
committed
improvement: add new {:function, ...} type
closes #105
1 parent f297c35 commit f6e2ac4

File tree

6 files changed

+300
-12
lines changed

6 files changed

+300
-12
lines changed

lib/spark/elixir_sense/plugin.ex

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -956,6 +956,26 @@ defmodule Spark.ElixirSense.Plugin do
956956
end
957957
"""
958958

959+
match?({:function, _}, config[:type]) ->
960+
{:function, opts} = config[:type]
961+
arity = opts[:arity] || length(opts[:args] || [])
962+
963+
if arity == 0 do
964+
"""
965+
fn ->
966+
${0:body}
967+
end
968+
"""
969+
else
970+
args = Enum.map_join(0..(arity - 1), ", ", &"arg#{&1 + 1}")
971+
972+
"""
973+
fn #{args} ->
974+
${#{arity}:body}
975+
end
976+
"""
977+
end
978+
959979
match?(:map, config[:type]) || match?({:map, _, _}, config[:type]) ->
960980
"%{${1:key} => ${2:value}}"
961981

lib/spark/info_generator.ex

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -350,6 +350,25 @@ defmodule Spark.InfoGenerator do
350350
[{:->, [], [args, {:any, [], Elixir}]}]
351351
end
352352

353+
def spec_for_type({:function, opts}, _opts) do
354+
args =
355+
if arg_types = opts[:args] do
356+
Enum.map(arg_types, &spec_for_type(&1, []))
357+
else
358+
arity = opts[:arity] || 0
359+
List.duplicate({:any, [], Elixir}, arity)
360+
end
361+
362+
returns =
363+
if return_type = opts[:returns] do
364+
spec_for_type(return_type, [])
365+
else
366+
{:any, [], Elixir}
367+
end
368+
369+
[{:->, [], [args, returns]}]
370+
end
371+
353372
# Treat `and` like `or` because any of the input types is valid.
354373
def spec_for_type({:and, subtypes}, opts), do: spec_for_type({:or, subtypes}, opts)
355374

lib/spark/options/docs.ex

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,6 +179,7 @@ defmodule Spark.Options.Docs do
179179
defp get_raw_type_str(:fun), do: "function"
180180
defp get_raw_type_str({:fun, _}), do: "function"
181181
defp get_raw_type_str({:fun, _, _}), do: "function"
182+
defp get_raw_type_str({:function, _}), do: "function"
182183

183184
defp get_raw_type_str({:wrap_list, subtype}) do
184185
if subtype_str = get_raw_type_str(subtype), do: "one or a list of #{subtype_str}"
@@ -291,6 +292,28 @@ defmodule Spark.Options.Docs do
291292
"(#{args} -> any)"
292293
end
293294

295+
def dsl_docs_type({:function, opts}) when is_list(opts) do
296+
args = opts[:args]
297+
returns = opts[:returns]
298+
arity = opts[:arity] || (args && length(args))
299+
300+
cond do
301+
args && returns ->
302+
args_str = Enum.map_join(args, ", ", &dsl_docs_type/1)
303+
"(#{args_str} -> #{dsl_docs_type(returns)})"
304+
305+
args ->
306+
args_str = Enum.map_join(args, ", ", &dsl_docs_type/1)
307+
"(#{args_str} -> any)"
308+
309+
arity ->
310+
dsl_docs_type({:fun, arity})
311+
312+
true ->
313+
"(... -> any)"
314+
end
315+
end
316+
294317
def dsl_docs_type({:fun, args}) when is_list(args) do
295318
args =
296319
Enum.map_join(args, ", ", &dsl_docs_type/1)
@@ -443,6 +466,18 @@ defmodule Spark.Options.Docs do
443466
{:fun, arg_types, return_type} ->
444467
function_spec(arg_types, return_type)
445468

469+
{:function, opts} ->
470+
args = opts[:args]
471+
returns = opts[:returns]
472+
arity = opts[:arity] || (args && length(args))
473+
474+
cond do
475+
args && returns -> function_spec(args, returns)
476+
args -> function_spec(args)
477+
arity -> function_spec(arity)
478+
true -> quote(do: (... -> term()))
479+
end
480+
446481
{:in, %Range{first: first, last: last} = range} ->
447482
if Map.get(range, :step) in [nil, 1] do
448483
quote(do: unquote(first)..unquote(last))

lib/spark/options/options.ex

Lines changed: 80 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -205,6 +205,12 @@ defmodule Spark.Options do
205205
206206
* `{:fun, args_types, return_type}` - A function with the specified arguments and return type.
207207
208+
* `{:function, arity: n, args: [...], returns: type}` - A function type with named options. All keys are optional.
209+
`:arity` specifies the arity, `:args` specifies argument types, and `:returns` specifies the return type.
210+
For example, `{:function, args: [:map], returns: :string}` is a function taking a map and returning a string.
211+
This format is preferred over the 3-tuple `{:fun, args, return_type}` as it is a 2-tuple and avoids
212+
AST escaping issues when used in nested type positions.
213+
208214
* `{:in, choices}` or `{:one_of, choices}` - A value that is a member of one of the `choices`. `choices`
209215
should be a list of terms or a `Range`. The value is an element in said
210216
list of terms, that is, `value in choices` is `true`.
@@ -1220,6 +1226,11 @@ defmodule Spark.Options do
12201226
)
12211227
end
12221228

1229+
defp validate_type({:function, opts}, key, value) when is_list(opts) do
1230+
arity = opts[:arity] || length(opts[:args] || [])
1231+
validate_type({:fun, arity}, key, value)
1232+
end
1233+
12231234
defp validate_type(nil, _key, nil), do: {:ok, nil}
12241235

12251236
defp validate_type(nil, key, value),
@@ -1707,7 +1718,8 @@ defmodule Spark.Options do
17071718
"{:list, subtype}",
17081719
"{:tuple, list_of_subtypes}",
17091720
"{:map, key_type, value_type}",
1710-
"{:struct, struct_name}"
1721+
"{:struct, struct_name}",
1722+
"{:function, arity: n, args: [...], returns: type}"
17111723
]
17121724

17131725
Enum.join(types, ", ")
@@ -1738,17 +1750,7 @@ defmodule Spark.Options do
17381750
end
17391751

17401752
def validate_type({:fun, list}) when is_list(list) do
1741-
Enum.reduce_while(list, {:ok, list}, fn
1742-
{type, _keys}, acc
1743-
when type in [:keyword_list, :non_empty_keyword_list, :map] ->
1744-
{:cont, acc}
1745-
1746-
subtype, acc ->
1747-
case validate_type(subtype) do
1748-
{:ok, _value} -> {:cont, acc}
1749-
{:error, reason} -> {:halt, {:error, "invalid type given to :fun type: #{reason}"}}
1750-
end
1751-
end)
1753+
validate_fun_arg_types(list)
17521754
end
17531755

17541756
def validate_type({:fun, list, returns}) when is_list(list) do
@@ -1778,6 +1780,35 @@ defmodule Spark.Options do
17781780
end
17791781
end
17801782

1783+
def validate_type({:function, opts}) when is_list(opts) do
1784+
arity = opts[:arity]
1785+
args = opts[:args]
1786+
returns = opts[:returns]
1787+
1788+
extra_keys = Keyword.keys(opts) -- [:arity, :args, :returns]
1789+
1790+
cond do
1791+
extra_keys != [] ->
1792+
{:error,
1793+
"invalid keys #{inspect(extra_keys)} in function type. Only :arity, :args, and :returns are supported"}
1794+
1795+
arity != nil && (!is_integer(arity) || arity < 0) ->
1796+
{:error, "expected :arity to be a non-negative integer, got: #{inspect(arity)}"}
1797+
1798+
args != nil && !is_list(args) ->
1799+
{:error, "expected :args to be a list of types, got: #{inspect(args)}"}
1800+
1801+
arity != nil && args != nil && length(args) != arity ->
1802+
{:error, "expected :args length (#{length(args)}) to match :arity (#{arity})"}
1803+
1804+
true ->
1805+
with {:ok, _} <- validate_fun_args(args || []),
1806+
{:ok, _} <- validate_fun_returns(returns) do
1807+
{:ok, {:function, opts}}
1808+
end
1809+
end
1810+
end
1811+
17811812
def validate_type({:spark_type, module, builtin_function} = type)
17821813
when is_atom(module) and is_atom(builtin_function) do
17831814
{:ok, type}
@@ -1950,6 +1981,43 @@ defmodule Spark.Options do
19501981
{:error, "unknown type #{inspect(value)}.\n\nAvailable types: #{available_types()}"}
19511982
end
19521983

1984+
defp validate_fun_arg_types(list) do
1985+
Enum.reduce_while(list, {:ok, list}, fn
1986+
{type, _keys}, acc
1987+
when type in [:keyword_list, :non_empty_keyword_list, :map] ->
1988+
{:cont, acc}
1989+
1990+
subtype, acc ->
1991+
case validate_type(subtype) do
1992+
{:ok, _value} -> {:cont, acc}
1993+
{:error, reason} -> {:halt, {:error, "invalid type given to :fun type: #{reason}"}}
1994+
end
1995+
end)
1996+
end
1997+
1998+
defp validate_fun_args(args) do
1999+
Enum.reduce_while(args, {:ok, args}, fn
2000+
{type, _keys}, acc
2001+
when type in [:keyword_list, :non_empty_keyword_list, :map] ->
2002+
{:cont, acc}
2003+
2004+
subtype, acc ->
2005+
case validate_type(subtype) do
2006+
{:ok, _value} -> {:cont, acc}
2007+
{:error, reason} -> {:halt, {:error, "invalid type given to :fun type: #{reason}"}}
2008+
end
2009+
end)
2010+
end
2011+
2012+
defp validate_fun_returns(nil), do: {:ok, nil}
2013+
2014+
defp validate_fun_returns(returns) do
2015+
case validate_type(returns) do
2016+
{:ok, _} -> {:ok, returns}
2017+
{:error, error} -> {:error, "invalid return type given to :fun type: #{error}"}
2018+
end
2019+
end
2020+
19532021
defp error_tuple(key, value, message) do
19542022
{:error, %ValidationError{key: key, message: message, value: value}}
19552023
end

test/info_generator_test.exs

Lines changed: 138 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,4 +20,142 @@ defmodule Spark.InfoGeneratorTest do
2020
assert %{} = Info.my_section_map_option!(Resource)
2121
end
2222
end
23+
24+
describe "{:function, ...} type" do
25+
test "compiles and generates info functions for {:function, args: [...], returns: ...}" do
26+
assert :error = Info.my_section_callback(Resource)
27+
end
28+
29+
test "compiles when used inside {:or, ...}" do
30+
assert :error = Info.my_section_handler(Resource)
31+
end
32+
end
33+
34+
describe "{:function, ...} type validation" do
35+
test "accepts args with returns" do
36+
assert {:ok, {:function, args: [:map], returns: :string}} =
37+
Spark.Options.validate_type({:function, args: [:map], returns: :string})
38+
end
39+
40+
test "accepts arity only" do
41+
assert {:ok, {:function, arity: 2}} =
42+
Spark.Options.validate_type({:function, arity: 2})
43+
end
44+
45+
test "accepts args only" do
46+
assert {:ok, {:function, args: [:string, :integer]}} =
47+
Spark.Options.validate_type({:function, args: [:string, :integer]})
48+
end
49+
50+
test "accepts arity with matching args" do
51+
assert {:ok, {:function, arity: 2, args: [:map, :string]}} =
52+
Spark.Options.validate_type({:function, arity: 2, args: [:map, :string]})
53+
end
54+
55+
test "accepts returns only" do
56+
assert {:ok, {:function, returns: :string}} =
57+
Spark.Options.validate_type({:function, returns: :string})
58+
end
59+
60+
test "rejects mismatched arity and args" do
61+
assert {:error, message} =
62+
Spark.Options.validate_type({:function, arity: 3, args: [:map, :string]})
63+
64+
assert message =~ "args length"
65+
end
66+
67+
test "rejects invalid keys" do
68+
assert {:error, message} =
69+
Spark.Options.validate_type({:function, foo: :bar})
70+
71+
assert message =~ "invalid keys"
72+
end
73+
74+
test "rejects negative arity" do
75+
assert {:error, message} =
76+
Spark.Options.validate_type({:function, arity: -1})
77+
78+
assert message =~ "non-negative integer"
79+
end
80+
81+
test "rejects non-integer arity" do
82+
assert {:error, message} =
83+
Spark.Options.validate_type({:function, arity: :two})
84+
85+
assert message =~ "non-negative integer"
86+
end
87+
88+
test "rejects non-list args" do
89+
assert {:error, message} =
90+
Spark.Options.validate_type({:function, args: :map})
91+
92+
assert message =~ "list of types"
93+
end
94+
95+
test "validates nested arg types" do
96+
assert {:error, _} =
97+
Spark.Options.validate_type({:function, args: [:not_a_real_type]})
98+
end
99+
100+
test "validates nested return type" do
101+
assert {:error, _} =
102+
Spark.Options.validate_type({:function, returns: :not_a_real_type})
103+
end
104+
end
105+
106+
describe "{:function, ...} runtime validation" do
107+
test "accepts a function matching arity from args" do
108+
schema = [cb: [type: {:function, args: [:map], returns: :string}]]
109+
assert {:ok, _} = Spark.Options.validate([cb: fn _ -> "ok" end], schema)
110+
end
111+
112+
test "accepts a function matching explicit arity" do
113+
schema = [cb: [type: {:function, arity: 2}]]
114+
assert {:ok, _} = Spark.Options.validate([cb: fn _, _ -> :ok end], schema)
115+
end
116+
117+
test "rejects a function with wrong arity" do
118+
schema = [cb: [type: {:function, args: [:map], returns: :string}]]
119+
assert {:error, _} = Spark.Options.validate([cb: fn -> "ok" end], schema)
120+
end
121+
122+
test "rejects non-function values" do
123+
schema = [cb: [type: {:function, args: [:map]}]]
124+
assert {:error, _} = Spark.Options.validate([cb: "not_a_function"], schema)
125+
end
126+
127+
test "works inside {:or, ...} at runtime" do
128+
schema = [cb: [type: {:or, [:string, {:function, args: [:map]}]}]]
129+
assert {:ok, _} = Spark.Options.validate([cb: "a string"], schema)
130+
assert {:ok, _} = Spark.Options.validate([cb: fn _ -> :ok end], schema)
131+
assert {:error, _} = Spark.Options.validate([cb: 42], schema)
132+
end
133+
end
134+
135+
describe "{:function, ...} documentation" do
136+
test "dsl_docs_type with args and returns" do
137+
assert "(map -> String.t)" =
138+
Spark.Options.Docs.dsl_docs_type({:function, args: [:map], returns: :string})
139+
end
140+
141+
test "dsl_docs_type with args only" do
142+
assert "(map, String.t -> any)" =
143+
Spark.Options.Docs.dsl_docs_type({:function, args: [:map, :string]})
144+
end
145+
146+
test "dsl_docs_type with arity only" do
147+
assert "(any, any -> any)" =
148+
Spark.Options.Docs.dsl_docs_type({:function, arity: 2})
149+
end
150+
151+
test "dsl_docs_type with zero arity" do
152+
assert "(-> any)" =
153+
Spark.Options.Docs.dsl_docs_type({:function, arity: 0})
154+
end
155+
156+
test "dsl_docs_type with returns only" do
157+
assert "(... -> any)" =
158+
Spark.Options.Docs.dsl_docs_type({:function, returns: :string})
159+
end
160+
end
23161
end

0 commit comments

Comments
 (0)