Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions lib/spark/info_generator.ex
Original file line number Diff line number Diff line change
Expand Up @@ -350,6 +350,9 @@ defmodule Spark.InfoGenerator do
[{:->, [], [args, {:any, [], Elixir}]}]
end

# Elixir typespecs have no intersection type, so an `:and` type falls back
# to the same union spec generated for `:or` — an over-approximation that
# admits any one of the subtypes rather than requiring all of them.
# NOTE(review): there appears to be no dedicated `:and` handling for docs
# rendering here — confirm the generated spec/docs read acceptably.
def spec_for_type({:and, subtypes}, opts), do: spec_for_type({:or, subtypes}, opts)
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

lol

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would be nice to not have to do this. We may want to do something to make it look nice in docs?

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Actually yeah I don't see a handler for the docs bit here.


def spec_for_type({:or, [type]}, _opts), do: spec_for_type(type, [])

def spec_for_type({:or, [next | remaining]}, _opts),
Expand Down
74 changes: 74 additions & 0 deletions lib/spark/options/options.ex
Original file line number Diff line number Diff line change
Expand Up @@ -251,6 +251,15 @@ defmodule Spark.Options do
the list is empty, the function must have exactly one argument, i.e. `{:custom, mod, fun, []}`
expects `mod.fun/1` to exist.

* `{:and, subtypes}` - A value that matches all of the given `subtypes`. The value is
matched against the subtypes in the order specified in the list of `subtypes`. If
all of the subtypes match then the value is valid. If one of the subtypes matches and **updates** (casts) a given value, then the updated value is
passed on to any subsequent checks.
If one of the subtypes is a keyword list or map, you won't be able to pass
`:keys` directly. For this reason `:keyword_list`, `:non_empty_keyword_list`,
and `:map` are special cased and can be used as subtypes with
`{:keyword_list, keys}`, `{:non_empty_keyword_list, keys}` or `{:map, keys}`.

* `{:or, subtypes}` - A value that matches one of the given `subtypes`. The value is
matched against the subtypes in the order specified in the list of `subtypes`. If
one of the subtypes matches and **updates** (casts) the given value, the updated
Expand Down Expand Up @@ -437,6 +446,8 @@ defmodule Spark.Options do
| {:fun, list(type)}
| {:fun, list(type), type}
| {:in, [any] | Range.t()}
| {:and,
[type | {:keyword_list, schema} | {:non_empty_keyword_list, schema} | {:map, schema}]}
| {:or,
[type | {:keyword_list, schema} | {:non_empty_keyword_list, schema} | {:map, schema}]}
| {:list,
Expand Down Expand Up @@ -1245,6 +1256,54 @@ defmodule Spark.Options do
end
end

# Validates `value` against an `{:and, subtypes}` type: the value must match
# every subtype. Subtypes are checked in order; a subtype that succeeds may
# cast the value, and the cast value is threaded into the remaining checks.
# All failures are collected (rather than stopping at the first) so the
# final error message can report every subtype that did not match.
#
# Fixed from the original: `Enum.reduce_while/3` was used even though every
# branch returned `{:cont, _}` — plain `Enum.reduce/3` expresses the same
# traversal. The accumulated errors are also reversed before rendering so
# the reasons appear in the same order as the subtypes.
defp validate_type({:and, subtypes}, key, value) do
  {final_value, errors} =
    Enum.reduce(subtypes, {value, []}, fn subtype, {current_value, errors_acc} ->
      # :keyword_list / :non_empty_keyword_list / :map subtypes may carry a
      # nested schema that must be validated after the basic type check.
      {subtype, nested_schema} =
        case subtype do
          {type, keys} when type in [:keyword_list, :non_empty_keyword_list, :map] ->
            {type, keys}

          other ->
            {other, _nested_schema = nil}
        end

      case validate_type(subtype, key, current_value) do
        {:ok, validated_value} when not is_nil(nested_schema) ->
          case validate_options_with_schema_and_path(
                 validated_value,
                 nested_schema,
                 _path = [key]
               ) do
            {:ok, validated_value} ->
              {validated_value, errors_acc}

            {:error, %ValidationError{} = error} ->
              {current_value, [error | errors_acc]}
          end

        {:ok, validated_value} ->
          {validated_value, errors_acc}

        {:error, %ValidationError{} = reason} ->
          {current_value, [reason | errors_acc]}
      end
    end)

  case errors do
    [] ->
      {:ok, final_value}

    errors ->
      # Errors were prepended during the reduce; reverse so the reasons are
      # listed in the same order the subtypes were declared.
      message =
        "expected #{render_key(key)} to match all given types, but didn't match " <>
          "all of them. Here are the reasons why it didn't match each of the types:\n\n" <>
          Enum.map_join(Enum.reverse(errors), "\n", &(" * " <> Exception.message(&1)))

      error_tuple(key, value, message)
  end
end

defp validate_type({:or, subtypes}, key, value) do
result =
Enum.reduce_while(subtypes, _errors = [], fn subtype, errors_acc ->
Expand Down Expand Up @@ -1642,6 +1701,7 @@ defmodule Spark.Options do
[
"{:fun, arity}",
"{:in, choices}",
"{:and, subtypes}",
"{:or, subtypes}",
"{:custom, mod, fun, args}",
"{:list, subtype}",
Expand Down Expand Up @@ -1812,6 +1872,20 @@ defmodule Spark.Options do
end)
end

# Schema-level check for an `{:and, subtypes}` type declaration: every
# subtype must itself be a valid type. `{:keyword_list, keys}`,
# `{:non_empty_keyword_list, keys}` and `{:map, keys}` tuples are
# special-cased and accepted without inspecting their nested schemas here.
def validate_type({:and, subtypes} = value) when is_list(subtypes) do
  special_cased? = fn
    {type, _keys} when type in [:keyword_list, :non_empty_keyword_list, :map] -> true
    _other -> false
  end

  subtypes
  |> Enum.reject(special_cased?)
  |> Enum.find_value({:ok, value}, fn subtype ->
    # The first invalid subtype short-circuits the search with an error;
    # a valid subtype yields nil so the scan continues.
    case validate_type(subtype) do
      {:ok, _validated} -> nil
      {:error, reason} -> {:error, "invalid type given to :and type: #{reason}"}
    end
  end)
end

# This is to support the special-cased "{:list, {:keyword_list, my_key: [type: ...]}}",
# like we do in the :or type.
def validate_type({:list, {type, keys}})
Expand Down
141 changes: 141 additions & 0 deletions test/options/and_type_test.exs
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
# SPDX-FileCopyrightText: 2022 spark contributors <https://github.com/ash-project/spark/graphs/contributors>
#
# SPDX-License-Identifier: MIT

defmodule Spark.Options.AndTypeTest do
  @moduledoc false

  use ExUnit.Case

  describe "{:and, subtypes}" do
    test "validates when value matches all subtypes" do
      opts = [value: [type: {:and, [:integer, :pos_integer]}]]

      result = Spark.Options.validate([value: 5], opts)
      assert {:ok, [value: 5]} = result
    end

    test "fails when value doesn't match any subtype" do
      opts = [value: [type: {:and, [:integer, :pos_integer]}]]

      result = Spark.Options.validate([value: "not an integer"], opts)
      assert {:error, %Spark.Options.ValidationError{}} = result
    end

    test "fails when value matches first subtype but not second" do
      opts = [value: [type: {:and, [:integer, :pos_integer]}]]

      result = Spark.Options.validate([value: -5], opts)
      assert {:error, %Spark.Options.ValidationError{}} = result
    end

    test "fails when value matches second subtype but not first" do
      opts = [value: [type: {:and, [:pos_integer, {:in, [1, 2, 3]}]}]]

      result = Spark.Options.validate([value: 5], opts)
      assert {:error, %Spark.Options.ValidationError{}} = result
    end

    test "threads transformed values through subsequent subtypes" do
      # The custom subtype casts the value; the :in subtype then sees the
      # cast value, not the original.
      opts = [
        value: [
          type:
            {:and,
             [
               {:custom, __MODULE__, :double_value, []},
               {:in, [10, 20, 30]}
             ]}
        ]
      ]

      # 5 doubles to 10, which is a member of [10, 20, 30]
      ok_result = Spark.Options.validate([value: 5], opts)
      assert {:ok, [value: 10]} = ok_result

      # 7 doubles to 14, which is not a member of [10, 20, 30]
      error_result = Spark.Options.validate([value: 7], opts)
      assert {:error, %Spark.Options.ValidationError{}} = error_result
    end

    test "works with three or more subtypes" do
      opts = [
        value: [type: {:and, [:integer, :pos_integer, {:in, 1..10}]}]
      ]

      assert {:ok, [value: 5]} = Spark.Options.validate([value: 5], opts)

      result = Spark.Options.validate([value: 15], opts)
      assert {:error, %Spark.Options.ValidationError{}} = result
    end

    test "works with keyword_list subtype" do
      opts = [
        config: [
          type:
            {:and,
             [
               :keyword_list,
               {:keyword_list, [name: [type: :string, required: true]]}
             ]}
        ]
      ]

      ok_result = Spark.Options.validate([config: [name: "test"]], opts)
      assert {:ok, [config: [name: "test"]]} = ok_result

      error_result = Spark.Options.validate([config: []], opts)
      assert {:error, %Spark.Options.ValidationError{}} = error_result
    end

    test "works with non_empty_keyword_list subtype" do
      opts = [
        config: [
          type:
            {:and,
             [
               :non_empty_keyword_list,
               {:keyword_list, [name: [type: :string]]}
             ]}
        ]
      ]

      ok_result = Spark.Options.validate([config: [name: "test"]], opts)
      assert {:ok, [config: [name: "test"]]} = ok_result

      error_result = Spark.Options.validate([config: []], opts)
      assert {:error, %Spark.Options.ValidationError{}} = error_result
    end

    test "works with map subtype" do
      opts = [
        config: [
          type:
            {:and,
             [
               :map,
               {:map, [name: [type: :string, required: true]]}
             ]}
        ]
      ]

      ok_result = Spark.Options.validate([config: %{name: "test"}], opts)
      assert {:ok, [config: %{name: "test"}]} = ok_result

      error_result = Spark.Options.validate([config: %{}], opts)
      assert {:error, %Spark.Options.ValidationError{}} = error_result
    end

    test "error message mentions all failing subtypes" do
      opts = [value: [type: {:and, [:integer, :string]}]]

      result = Spark.Options.validate([value: :atom], opts)
      assert {:error, %Spark.Options.ValidationError{message: message}} = result

      # Both subtype failures should be reported, not just the first.
      assert message =~ "integer"
      assert message =~ "string"
    end
  end

  # Custom-type helper used by the cast-threading test: doubles integers,
  # errors on anything else.
  def double_value(value) when is_integer(value), do: {:ok, value * 2}
  def double_value(other), do: {:error, "expected integer, got: #{inspect(other)}"}
end