diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..913bfe5
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,116 @@
+name: CI
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+
+jobs:
+  setup:
+    name: Setup for Elixir ${{matrix.elixir}} / OTP ${{matrix.otp}}
+    runs-on: ubuntu-latest
+    strategy: &strategy
+      matrix:
+        elixir: ["1.15", "1.16", "1.17", "1.18", "1.19"]
+        otp: ["25", "26", "27", "28"]
+        # see https://hexdocs.pm/elixir/compatibility-and-deprecations.html#between-elixir-and-erlang-otp
+        # NOTE: values must be quoted strings so they compare equal to the
+        # (string) matrix values above; unquoted 1.15 / 27 parse as YAML
+        # numbers and the exclusions silently never match.
+        exclude:
+          [
+            { elixir: "1.15", otp: "27" },
+            { elixir: "1.15", otp: "28" },
+            { elixir: "1.16", otp: "27" },
+            { elixir: "1.16", otp: "28" },
+            { elixir: "1.17", otp: "28" },
+            { elixir: "1.19", otp: "25" },
+          ]
+    steps:
+      - &checkout
+        name: Checkout
+        uses: actions/checkout@v4
+
+      - &setup
+        name: Set up Elixir
+        uses: erlef/setup-beam@v1
+        with:
+          otp-version: ${{ matrix.otp }}
+          elixir-version: ${{ matrix.elixir }}
+
+      - &cache
+        name: Restore deps cache
+        uses: actions/cache@v4
+        with:
+          path: |
+            deps
+            _build
+          key: deps-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-${{ hashFiles('**/mix.lock') }}-${{ github.sha }}
+          restore-keys: |
+            deps-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-${{ hashFiles('**/mix.lock') }}
+            deps-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}
+
+      - name: Install package dependencies
+        run: mix deps.get
+
+      - name: Compile dependencies
+        run: mix compile
+        env:
+          # This workflow declares no `inputs`, so `${{ inputs.mix_env }}`
+          # always expanded to the empty string. Compile for the test env
+          # explicitly so the cached _build is reusable by the `test` job.
+          MIX_ENV: test
+
+  test:
+    name: Test on Elixir ${{matrix.elixir}} / OTP ${{matrix.otp}}
+    runs-on: ubuntu-latest
+    needs: [setup]
+    strategy: *strategy
+    steps:
+      - *checkout
+      - *setup
+      - *cache
+      - name: Run unit tests
+        run: mix test
+
+  dependencies:
+    name: Check dependencies
+    runs-on: ubuntu-latest
+    needs: [setup]
+    strategy:
+      matrix: { elixir: ["1.19"], otp: ["28"] }
+    steps:
+      - *checkout
+      - name: Set up Elixir
+        uses: erlef/setup-beam@v1
+        with: { elixir-version: "1.19", otp-version: "28" }
+      - *cache
+      - name: Check for unused dependencies
+        run: mix deps.unlock --check-unused
+      - name: Check for retired dependencies
+        run: mix hex.audit
+      - name: Check dependencies for known vulnerabilities
+        # deps.audit comes from the mix_audit package (see mix.exs);
+        # the previous `mix hex.audit` here merely duplicated the
+        # retired-dependency check above.
+        run: mix deps.audit
+
+  format:
+    name: Check formatting
+    runs-on: ubuntu-latest
+    needs: [setup]
+    strategy:
+      matrix: { elixir: ["1.19"], otp: ["28"] }
+    steps:
+      - *checkout
+      - name: Set up Elixir
+        uses: erlef/setup-beam@v1
+        with: { elixir-version: "1.19", otp-version: "28" }
+      - *cache
+      - name: Check code format
+        run: mix format --check-formatted
+
+  release:
+    name: Create or Update Release PR
+    runs-on: ubuntu-latest
+    needs: [test, dependencies, format]
+    if: github.ref == 'refs/heads/main'
+    permissions:
+      contents: write
+      pull-requests: write
+    steps:
+      - uses: googleapis/release-please-action@v4
+        with:
+          token: ${{ secrets.REPO_GITHUB_TOKEN }}
+          release-type: elixir
diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml
new file mode 100644
index 0000000..eca3706
--- /dev/null
+++ b/.github/workflows/pr.yml
@@ -0,0 +1,16 @@
+name: Validate PR Title
+
+on:
+  pull_request_target:
+    types: [opened, synchronize, edited, reopened]
+
+jobs:
+  main:
+    name: Validate PR title
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: read
+    steps:
+      - uses: amannn/action-semantic-pull-request@v5
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..9ec1fe1
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,42 @@
+name: Publish Hex Package
+
+on:
+  release:
+    types: [published]
+  workflow_dispatch:
+    inputs:
+      tag:
+        description: "The git tag for the release to publish"
+        type: string
+        required: true
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out
+        # v4 for consistency with the other workflows (ci.yml uses v4)
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ inputs.tag || github.ref }}
+
+      - name: Set up Elixir
+        uses: erlef/setup-beam@v1
+        with:
+          otp-version: "28"
+          elixir-version: "1.19"
+
+      - name: Restore deps cache
+        uses: actions/cache@v4
+        with:
+          path: |
+            deps
+            _build
+          key: deps-${{ runner.os }}-28-1.19-${{ hashFiles('**/mix.lock') }}-${{ github.sha }}
+          restore-keys: |
+            # NOTE: was "28.1.19" (dot) — that key is never written by any
+            # workflow, so the fallback could never hit. Also dropped the
+            # trailing ${{ matrix.otp }}-${{ matrix.elixir }} from the second
+            # key: this job has no matrix, so those expanded to empty strings.
+            deps-${{ runner.os }}-28-1.19-${{ hashFiles('**/mix.lock') }}
+            deps-${{ runner.os }}-28-1.19
+
+      - name: Publish to hex.pm
+        env:
+          HEX_API_KEY: ${{ secrets.HEX_API_KEY }}
+        run: mix hex.publish --yes
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
deleted file mode 100644
index 2464277..0000000
--- a/.github/workflows/test.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-name: CI
-
-on:
-  pull_request:
-  push:
-    branches:
-      - main
-
-jobs:
-  test:
-    name: Elixir ${{matrix.pair.elixir}} / OTP ${{matrix.pair.otp}}
-    runs-on: ubuntu-20.04
-
-    strategy:
-      matrix:
-        include:
-          - pair:
-              elixir: "1.12"
-              otp: "22"
-          - pair:
-              elixir: "1.18"
-              otp: "27"
-            lint: true
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Set up Elixir
-        uses: erlef/setup-beam@v1
-        with:
-          elixir-version: ${{ matrix.pair.elixir }}
-          otp-version: ${{ matrix.pair.otp }}
-
-      - name: Restore deps cache
-        uses: actions/cache@v4
-        with:
-          path: |
-            deps
-            _build
-          key: deps-${{ runner.os }}-${{ matrix.pair.otp }}-${{ matrix.pair.elixir }}-${{ hashFiles('**/mix.lock') }}-git-${{ github.sha }}
-          restore-keys: |
-            deps-${{ runner.os }}-${{ matrix.pair.otp }}-${{ matrix.pair.elixir }}-${{ hashFiles('**/mix.lock') }}
-            deps-${{ runner.os }}-${{ matrix.pair.otp }}-${{ matrix.pair.elixir }}
-
-      - name: Install package dependencies
-        run: mix deps.get
-
-      - name: Check unused deps
-        run: mix deps.unlock --check-unused
-        if: ${{ matrix.lint }}
-
-      - name: Check code format
-        run: mix format --check-formatted
-        if: ${{ matrix.lint }}
-
-      - name: Compile dependencies
-        run: mix compile
-        env:
-          MIX_ENV: test
-
-      - name: Run unit tests
-        run: mix test
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 93d9e07..99a94ad 100644
---
 a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## [1.6.0](https://github.com/absinthe-graphql/absinthe_relay/compare/v1.5.2...v1.6.0) (2025-11-06)
+
+### Miscellaneous Chores
+
+* release 1.6.0 ([4174c0a](https://github.com/absinthe-graphql/absinthe_relay/commit/4174c0a83bea8d0fa5e3034f379acb203e01b805))
+* update minimum elixir version
+* update absinthe dependency minimum version
+
 ## 1.4.4 - 2018-09-20
 
 - Feature: Enhancements to Connection macros to support extensibility of edge types. See [PR #109](https://github.com/absinthe-graphql/absinthe_relay/pull/109) (Thanks, @coderdan!)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index f77b212..6d90ac1 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -24,7 +24,7 @@ Make your change. Add tests for your change. Make the tests pass:
 Push to your fork (preferably to a non-`master` branch) and [submit a pull request][pr].
 
-[pr]: https://github.com/absinthe-graphql/absinthe_relay/compare/
+Ensure your pull request title follows the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification.
 
 We'll review and answer your pull request as soon as possible. We may suggest some changes, improvements, or alternatives. Let's work through it together.
diff --git a/README.md b/README.md
index b2f69f4..34970f7 100644
--- a/README.md
+++ b/README.md
@@ -105,6 +105,49 @@ Relay supports mutation via [a contract](https://facebook.github.io/relay/docs/e
 See the [Absinthe.Relay.Mutation](https://hexdocs.pm/absinthe_relay/Absinthe.Relay.Mutation.html) module documentation for specific instructions on how to design a schema that makes use of mutations.
 
+### Incremental Delivery
+
+Absinthe.Relay supports GraphQL `@defer` and `@stream` directives for incremental delivery with Relay connections. This enables streaming of connection edges while maintaining proper cursor consistency and connection structure.
+ +Key features: +- ✅ **Relay Specification**: Full compliance with Relay Cursor Connection spec +- ✅ **Cursor Consistency**: Maintains proper cursor ordering during streaming +- ✅ **Connection Structure**: Preserves `pageInfo` and connection metadata +- ✅ **Bidirectional Pagination**: Supports forward and backward streaming + +**Installation with incremental delivery:** + +```elixir +def deps do + [ + {:absinthe, git: "https://github.com/gigsmart/absinthe.git", branch: "gigmart/defer-stream-incremental"}, + {:absinthe_relay, git: "https://github.com/gigsmart/absinthe_relay.git", branch: "gigmart/defer-stream-incremental"} + ] +end +``` + +**Example usage:** + +```graphql +query GetPosts($first: Int!, $after: String) { + posts(first: $first, after: $after) @stream(initialCount: 2, label: "posts") { + pageInfo { + hasNextPage + endCursor + } + edges { + cursor + node { + id + title + } + } + } +} +``` + +For comprehensive documentation on Relay incremental delivery patterns, see [Absinthe Incremental Delivery Guide](https://hexdocs.pm/absinthe/incremental-delivery.html). + ## Supporting the Babel Relay Plugin To generate a `schema.json` file for use with the [Babel Relay Plugin](https://facebook.github.io/relay/docs/en/installation-and-setup.html#set-up-babel-plugin-relay), run the `absinthe.schema.json` Mix task, built-in to [Absinthe](https://github.com/absinthe-graphql/absinthe). diff --git a/lib/absinthe/relay/incremental/connection.ex b/lib/absinthe/relay/incremental/connection.ex new file mode 100644 index 0000000..6646e1f --- /dev/null +++ b/lib/absinthe/relay/incremental/connection.ex @@ -0,0 +1,349 @@ +defmodule Absinthe.Relay.Incremental.Connection do + @moduledoc """ + Streaming support for Relay connections. + + This module enables @stream directive to work correctly with Relay's + connection pattern, ensuring proper cursor handling and pagination + with incremental delivery. 
+ """ + + alias Absinthe.Relay.Connection + + @type stream_config :: %{ + initial_count: non_neg_integer(), + label: String.t() | nil, + path: list() + } + + @type streaming_connection :: %{ + initial: Connection.t(), + stream_plan: list(stream_batch()), + total_count: non_neg_integer() + } + + @type stream_batch :: %{ + edges: list(Connection.Edge.t()), + path: list(), + label: String.t() | nil, + start_cursor: String.t(), + end_cursor: String.t() + } + + @doc """ + Convert a Relay connection to support streaming. + + This splits the connection into an initial response and a streaming plan + for the remaining edges. + """ + @spec stream_connection(Connection.t(), stream_config()) :: + {:ok, streaming_connection()} | {:error, term()} + def stream_connection(connection, stream_config) do + initial_count = Map.get(stream_config, :initial_count, 0) + + # Split edges into initial and remaining + {initial_edges, remaining_edges} = + split_edges(connection.edges, initial_count) + + # Build initial connection with updated page info + initial_connection = %{connection | + edges: initial_edges, + page_info: update_page_info_for_streaming( + connection.page_info, + initial_edges, + remaining_edges, + connection + ) + } + + # Create streaming plan for remaining edges + stream_plan = + if Enum.empty?(remaining_edges) do + [] + else + plan_edge_streaming(remaining_edges, stream_config) + end + + {:ok, %{ + initial: initial_connection, + stream_plan: stream_plan, + total_count: length(connection.edges) + }} + end + + @doc """ + Process a streamed batch of edges. + + Returns the edges formatted for incremental delivery with proper + cursor continuity. 
+ """ + @spec process_stream_batch(stream_batch()) :: map() + def process_stream_batch(batch) do + %{ + edges: Enum.map(batch.edges, &format_edge/1), + path: batch.path, + label: batch.label, + pageInfo: %{ + startCursor: batch.start_cursor, + endCursor: batch.end_cursor + } + } + end + + @doc """ + Validate cursor continuity across streamed batches. + + Ensures that cursors maintain proper ordering when edges are + delivered incrementally. + """ + @spec validate_cursor_continuity(list(Connection.Edge.t()), list(Connection.Edge.t())) :: + :ok | {:error, term()} + def validate_cursor_continuity([], _), do: :ok + def validate_cursor_continuity(_, []), do: :ok + + def validate_cursor_continuity(previous_edges, new_edges) do + last_cursor = get_last_cursor(previous_edges) + first_cursor = get_first_cursor(new_edges) + + if follows_cursor?(first_cursor, last_cursor) do + :ok + else + {:error, "Cursor discontinuity detected in streamed connection"} + end + end + + @doc """ + Create a connection that supports streaming from a list of items. + + This is a streaming-aware version of Relay.Connection.from_list. + """ + @spec from_list(list(), map(), Keyword.t()) :: {:ok, Connection.t()} | {:error, term()} + def from_list(items, args, opts \\ []) do + # Check if streaming is requested + case Map.get(args, :stream) do + nil -> + # Standard connection without streaming + Connection.from_list(items, args, opts) + + stream_args -> + # Create streaming connection + build_streaming_connection(items, args, stream_args, opts) + end + end + + @doc """ + Apply @stream directive to a connection field. + + This is used by the schema to mark connection fields for streaming. 
+ """ + @spec stream_field(atom(), Keyword.t()) :: Absinthe.Schema.Notation.field_result() + defmacro stream_field(field_name, opts \\ []) do + quote do + field unquote(field_name), :connection do + # Add streaming metadata + meta :streaming_enabled, true + + # Apply options + unquote(Keyword.get(opts, :do)) + + # Wrap resolver with streaming support + middleware Absinthe.Relay.Incremental.Connection.StreamingMiddleware + end + end + end + + # Private functions + + defp split_edges(edges, initial_count) when initial_count >= 0 do + {Enum.take(edges, initial_count), Enum.drop(edges, initial_count)} + end + + defp update_page_info_for_streaming(page_info, initial_edges, remaining_edges, connection) do + has_more = not Enum.empty?(remaining_edges) + + %{page_info | + # Indicate more edges are coming via streaming + has_next_page: page_info.has_next_page or has_more, + # Update end cursor to last initial edge if we have any + end_cursor: get_last_cursor(initial_edges) || page_info.end_cursor, + # Keep start cursor from first edge + start_cursor: get_first_cursor(initial_edges) || page_info.start_cursor + } + end + + defp plan_edge_streaming(edges, config) do + batch_size = calculate_stream_batch_size(config) + + edges + |> Enum.chunk_every(batch_size) + |> Enum.with_index() + |> Enum.map(fn {edge_batch, index} -> + %{ + edges: edge_batch, + path: config.path ++ ["edges"], + label: build_batch_label(config.label, index), + start_cursor: get_first_cursor(edge_batch), + end_cursor: get_last_cursor(edge_batch) + } + end) + end + + defp calculate_stream_batch_size(config) do + # Determine optimal batch size based on configuration + Map.get(config, :batch_size, 10) + end + + defp format_edge(edge) do + %{ + node: edge.node, + cursor: edge.cursor + } + end + + defp get_first_cursor([]), do: nil + defp get_first_cursor([edge | _]), do: edge.cursor + + defp get_last_cursor([]), do: nil + defp get_last_cursor(edges), do: List.last(edges).cursor + + defp follows_cursor?(nil, _), 
do: true
+  defp follows_cursor?(_, nil), do: true
+  defp follows_cursor?(cursor1, cursor2) do
+    # Decode and compare cursors; any undecodable cursor breaks continuity.
+    with {:ok, pos1} <- decode_cursor(cursor1),
+         {:ok, pos2} <- decode_cursor(cursor2) do
+      pos1 > pos2
+    else
+      _ -> false
+    end
+  end
+
+  # Extract the integer position component from a base64 cursor.
+  #
+  # Two layouts occur in this module:
+  #   * streaming cursors (see generate_streaming_cursor/3):
+  #       "cursor:<query_id>:<index>:<item_id>"  (4 segments)
+  #   * simple positional cursors: "<prefix>:<position>"  (2 segments)
+  #
+  # The previous implementation only matched ["cursor", position], so every
+  # cursor produced by generate_streaming_cursor/3 was rejected as
+  # :invalid_cursor, and String.to_integer/1 raised on non-numeric input
+  # instead of returning an error tuple.
+  defp decode_cursor(cursor) do
+    case Base.decode64(cursor) do
+      {:ok, decoded} ->
+        case String.split(decoded, ":") do
+          ["cursor", _query_id, index, _item_id] -> parse_position(index)
+          [_prefix, position] -> parse_position(position)
+          _ -> {:error, :invalid_cursor}
+        end
+
+      # Base.decode64/1 returns the bare atom :error on invalid input;
+      # normalize it to a tagged tuple so callers see a consistent shape.
+      :error ->
+        {:error, :invalid_cursor}
+    end
+  end
+
+  # Safely parse an integer position (Integer.parse/1 never raises).
+  defp parse_position(string) do
+    case Integer.parse(string) do
+      {position, ""} -> {:ok, position}
+      _ -> {:error, :invalid_cursor}
+    end
+  end
+
+  defp build_batch_label(nil, index), do: "batch_#{index}"
+  defp build_batch_label(label, index), do: "#{label}_batch_#{index}"
+
+  defp build_streaming_connection(items, args, stream_args, opts) do
+    # First build standard connection
+    case Connection.from_list(items, Map.delete(args, :stream), opts) do
+      {:ok, connection} ->
+        # Then apply streaming
+        stream_config = %{
+          initial_count: Map.get(stream_args, :initial_count, 0),
+          label: Map.get(stream_args, :label),
+          path: Keyword.get(opts, :path, [])
+        }
+
+        stream_connection(connection, stream_config)
+
+      error ->
+        error
+    end
+  end
+
+  @doc """
+  Generate a streaming cursor for an item.
+
+  Ensures cursor stability across incremental deliveries.
+  """
+  @spec generate_streaming_cursor(any(), non_neg_integer(), map()) :: String.t()
+  def generate_streaming_cursor(item, index, context) do
+    # Generate a stable cursor that includes:
+    # - Query ID for uniqueness
+    # - Index for ordering
+    # - Item ID if available
+
+    query_id = Map.get(context, :query_id, "default")
+    item_id = get_item_id(item)
+
+    cursor_data = "cursor:#{query_id}:#{index}:#{item_id}"
+    Base.encode64(cursor_data)
+  end
+
+  defp get_item_id(item) do
+    case item do
+      %{id: id} -> id
+      _ -> :erlang.phash2(item)
+    end
+  end
+end
+
+defmodule Absinthe.Relay.Incremental.Connection.StreamingMiddleware do
+  @moduledoc """
+  Middleware that adds streaming support to Relay connections.
+ """ + + @behaviour Absinthe.Middleware + + alias Absinthe.Resolution + alias Absinthe.Relay.Incremental.Connection + + def call(resolution, _opts) do + # Check if streaming is enabled for this field + if streaming_enabled?(resolution) do + wrap_with_streaming(resolution) + else + resolution + end + end + + defp streaming_enabled?(resolution) do + # Check field metadata for streaming flag + get_in(resolution.definition, [:meta, :streaming_enabled]) == true + end + + defp wrap_with_streaming(resolution) do + # Wrap the resolver to handle streaming + Resolution.put_result( + resolution, + resolve_with_streaming(resolution) + ) + end + + defp resolve_with_streaming(resolution) do + case resolution.value do + {:ok, %{edges: _} = connection} -> + # Check if @stream directive is present + case get_stream_directive(resolution) do + nil -> + {:ok, connection} + + stream_args -> + # Apply streaming to the connection + stream_config = %{ + initial_count: Map.get(stream_args, :initialCount, 0), + label: Map.get(stream_args, :label), + path: Resolution.path(resolution) + } + + Connection.stream_connection(connection, stream_config) + end + + other -> + other + end + end + + defp get_stream_directive(resolution) do + # Extract @stream directive arguments from the field + resolution.definition + |> Map.get(:directives, []) + |> Enum.find(fn + %{name: "stream"} -> true + _ -> false + end) + |> case do + %{arguments: args} -> args + _ -> nil + end + end +end \ No newline at end of file diff --git a/mix.exs b/mix.exs index 349368d..ceb3cff 100644 --- a/mix.exs +++ b/mix.exs @@ -2,13 +2,13 @@ defmodule AbsintheRelay.Mixfile do use Mix.Project @source_url "https://github.com/absinthe-graphql/absinthe_relay" - @version "1.5.3" + @version "1.6.0" def project do [ app: :absinthe_relay, version: @version, - elixir: "~> 1.12", + elixir: "~> 1.15", elixirc_paths: elixirc_paths(Mix.env()), build_embedded: Mix.env() == :prod, start_permanent: Mix.env() == :prod, @@ -54,9 +54,10 @@ defmodule 
AbsintheRelay.Mixfile do defp deps do [ - {:absinthe, "~> 1.5.0 or ~> 1.6.0 or ~> 1.7.0"}, + {:absinthe, git: "https://github.com/gigsmart/absinthe.git", branch: "gigmart/defer-stream-incremental"}, {:ecto, "~> 2.0 or ~> 3.0", optional: true}, - {:ex_doc, ">= 0.0.0", only: :dev, runtime: false} + {:ex_doc, ">= 0.0.0", only: :dev, runtime: false}, + {:mix_audit, "~> 2.1", only: [:dev, :test], runtime: false} ] end end diff --git a/mix.lock b/mix.lock index f72f085..db3211e 100644 --- a/mix.lock +++ b/mix.lock @@ -1,12 +1,16 @@ %{ - "absinthe": {:hex, :absinthe, "1.7.8", "43443d12ad2b4fcce60e257ac71caf3081f3d5c4ddd5eac63a02628bcaf5b556", [:mix], [{:dataloader, "~> 1.0.0 or ~> 2.0", [hex: :dataloader, repo: "hexpm", optional: true]}, {:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:opentelemetry_process_propagator, "~> 0.2.1 or ~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c4085df201892a498384f997649aedb37a4ce8a726c170d5b5617ed3bf45d40b"}, - "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, + "absinthe": {:hex, :absinthe, "1.7.10", "b33471b593260f148d05e4d771d1857e07b70a680f89cfa75184098bef4ec893", [:mix], [{:dataloader, "~> 1.0.0 or ~> 2.0", [hex: :dataloader, repo: "hexpm", optional: true]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:opentelemetry_process_propagator, "~> 0.2.1 or ~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], 
"hexpm", "ffda95735364c041a65a4b0e02ffb04eabb1e52ab664fa7eeecefb341449e8c2"}, + "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"}, "earmark_parser": {:hex, :earmark_parser, "1.4.41", "ab34711c9dc6212dda44fcd20ecb87ac3f3fce6f0ca2f28d4a00e4154f8cd599", [:mix], [], "hexpm", "a81a04c7e34b6617c2792e291b5a2e57ab316365c2644ddc553bb9ed863ebefa"}, "ecto": {:hex, :ecto, "3.12.2", "bae2094f038e9664ce5f089e5f3b6132a535d8b018bd280a485c2f33df5c0ce1", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "492e67c70f3a71c6afe80d946d3ced52ecc57c53c9829791bfff1830ff5a1f0c"}, "ex_doc": {:hex, :ex_doc, "0.34.2", "13eedf3844ccdce25cfd837b99bea9ad92c4e511233199440488d217c92571e8", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "5ce5f16b41208a50106afed3de6a2ed34f4acfd65715b82a0b84b49d995f95c1"}, + "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, "makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], 
"hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"}, "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"}, "makeup_erlang": {:hex, :makeup_erlang, "1.0.1", "c7f58c120b2b5aa5fd80d540a89fdf866ed42f1f3994e4fe189abebeab610839", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "8a89a1eeccc2d798d6ea15496a6e4870b75e014d1af514b1b71fa33134f57814"}, - "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, + "mix_audit": {:hex, :mix_audit, "2.1.5", "c0f77cee6b4ef9d97e37772359a187a166c7a1e0e08b50edf5bf6959dfe5a016", [:make, :mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.11", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "87f9298e21da32f697af535475860dc1d3617a010e0b418d2ec6142bc8b42d69"}, + "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, + "yamerl": {:hex, :yamerl, "0.10.0", "4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"}, + "yaml_elixir": {:hex, :yaml_elixir, "2.12.0", 
"30343ff5018637a64b1b7de1ed2a3ca03bc641410c1f311a4dbdc1ffbbf449c7", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "ca6bacae7bac917a7155dca0ab6149088aa7bc800c94d0fe18c5238f53b313c6"}, }