diff --git a/.github/workflows/check-formatting.yaml.yml b/.github/workflows/check-formatting.yaml.yml index 97389594..67aec132 100644 --- a/.github/workflows/check-formatting.yaml.yml +++ b/.github/workflows/check-formatting.yaml.yml @@ -7,7 +7,7 @@ jobs: name: Check Python formatting with ruff runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 - uses: actions/setup-python@v3 - uses: astral-sh/ruff-action@v3 @@ -15,7 +15,16 @@ jobs: name: Check Snakemake formatting with snakefmt runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 - name: Install the latest version of uv uses: astral-sh/setup-uv@v7 - run: uv run snakefmt --check --compact-diff . + + check-formatting-with-pymarkdownlnt: + name: Check Markdown formatting with pymarkdownlnt + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v7 + - run: uv run pymarkdownlnt scan . diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0d2423f1..97c65119 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -31,5 +31,14 @@ and if you can help get them to pass, that would be great! You can contribute source code by forking this repository, creating a new branch, and then submitting a pull request to our [GitHub repository](https://github.com/NCATSTranslator/Babel). -Code submitted for review should use [ruff](https://docs.astral.sh/ruff/) to fix any style issues before submission. -You can run ruff by running `uv run ruff check`, and you can use `uv run ruff check --fix` to automatically fix some issues. \ No newline at end of file +We use three linters to check the style of submitted code in GitHub pull requests -- don't worry if this is difficult +to do at your end, as it is easy to fix in a pull request: +* [ruff](https://docs.astral.sh/ruff/) for Python code + * You can run this locally by running `uv run ruff check`. 
+ * You can use `uv run ruff check --fix` to automatically fix some issues. +* [snakefmt](https://github.com/snakemake/snakefmt) for Snakemake files + * You can run this locally by running `uv run snakefmt --check --compact-diff .`. + * You can use `uv run snakefmt .` to automatically fix some issues. +* [pymarkdownlnt](https://pypi.org/project/pymarkdownlnt/) for Markdown files + * You can run this locally by running `uv run pymarkdownlnt scan .`. + * You can use `uv run pymarkdownlnt fix .` to automatically fix some issues. \ No newline at end of file diff --git a/DEPLOYMENT.md b/DEPLOYMENT.md deleted file mode 100644 index 1e7acda6..00000000 --- a/DEPLOYMENT.md +++ /dev/null @@ -1,51 +0,0 @@ -# Release information for Babel, NodeNorm and NameRes - -There are two main installations of NodeNorm that would be of interest -to users who aren't system administrators for these tools: - -* ITRB Production - * NodeNorm: https://nodenorm.transltr.io/docs - * NameRes: https://name-lookup.transltr.io/docs -* RENCI Development - * NodeNorm: https://nodenormalization-sri.renci.org/docs - * NameRes: https://name-resolution-sri.renci.org/docs - -## Release process and checkpoints -1. Create a new Babel release (see README.md for information). -2. Store the Babel outputs alongside other Babel releases on Hatteras. -3. Start validating the Babel release (see [Babel Validation] for information). - 1. Start a `validate` run that loads all the input files and generates count information. - 2. Start a `diff` run that compares this release with the previous Babel release. -4. Deploy a new NodeNorm instance - 1. Split the Babel outputs into smaller files to improve load times and put them on a public web server. - 2. Update the Translator-devops repo with the URL to these Babel output files. - 3. Create an [redis r3 external] instance to store identifiers. - 4. Run the [NodeNorm loader] to load the Babel outputs into the redis r3 instance. - 5. 
Create a [NodeNorm web server] to share the data in the redis r3 instance. -5. Deploy a new NameRes instance - 1. Create an empty Apache Solr instance. - 2. Load it with synonym information from Babel outputs. - 3. Write out a Solr backup and store it as a tarball. - 4. Copy the Solr backup to a publicly accessible URL. - 5. Update the Translator-devops repo with the new URL. - 6. Create a NameRes instance that will download the Solr backup and start the instance with it (see [NameRes devops] for information). -6. **Check with RENCI NodeNorm users before updating RENCI NodeNorm and NameRes instances** -7. Update RENCI NodeNorm and NameRes instances. -8. Announce on Translator and RENCI channels and ask people to try it out. -9. Deploy to ITRB - 1. Use the bastion servers to delete all data from the ITRB CI Redis R3 server. - 2. Update the Translator-Devops repo and create a PR for the develop branch. Once merged, the new Babel outputs should be loaded into the ITRB CI Redis R3 server. - 3. Use the bastion servers to delete all data from the ITRB Test Redis R3 server. - 4. Ask ITRB to run the NodeNorm loader to populate the ITRB Test Redis R3 server. - 5. **Announce upcoming downtime to NodeNorm Prod.** - 6. Ask ITRB to take down NodeNorm Prod. - 7. Use the bastion servers to delete all data from the ITRB Prod Redis R3 server. - 8. Ask ITRB to run the NodeNorm loader to populate the ITRB Prod Redis R3 server. - 9. Ask ITRB to start the NodeNorm Prod instance. 
- - - [Babel Validator]: https://github.com/TranslatorSRI/babel-validation - [redis r3 external]: https://github.com/helxplatform/translator-devops/tree/3e16517d6adc41db8f2156cc747b7a5ac20ee62d/helm/redis-r3-external - [NodeNorm loader]: https://github.com/helxplatform/translator-devops/tree/3e16517d6adc41db8f2156cc747b7a5ac20ee62d/helm/node-normalization-loader - [NodeNorm web server]: https://github.com/helxplatform/translator-devops/tree/3e16517d6adc41db8f2156cc747b7a5ac20ee62d/helm/node-normalization-web-server - [NameRes devops]: https://github.com/helxplatform/translator-devops/tree/3e16517d6adc41db8f2156cc747b7a5ac20ee62d/helm/name-lookup diff --git a/README.md b/README.md index 03a4d509..237ba299 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ both of these identifiers are identifying the same concept. Babel integrates the specific naming systems used in the Translator, creating equivalent sets across multiple semantic types, and following the -conventions established by the [biolink model](https://github.com/biolink/biolink-model). It checks these conventions +conventions established by the [Biolink Model](https://github.com/biolink/biolink-model). It checks these conventions at runtime by querying the [Biolink Model service](https://github.com/TranslatorIIPrototypes/bl_lookup). Each semantic type (such as chemical substance) requires specialized processing, but in each case, a JSON-formatted compendium is written to disk. This compendium can be used @@ -21,166 +21,183 @@ We anticipate that the simple approach taken here will soon be overtaken by more advanced probabilistic procedures, so caution should be taken in building strong dependencies against the Babel code. -## Configuration - -The [`./kubernetes`](./kubernetes/README.md) directory contains Kubernetes manifest files -that can be used to set up a Pod to run Babel in. They'll give you an idea of the disk -space and memory requirements needed to run this pipeline. 
- -Before running, read through `config.yaml` and make sure that the settings look correct. -You will need to update the version numbers of some databases that need to be downloaded, -or change the download and output directories. - -A UMLS API key is required in order to download UMLS and RxNorm databases. You will need -to set the `UMLS_API_KEY` environmental variable to a UMLS API key, which you can obtain -by creating a profile on the [UMLS Terminology Services website](https://uts.nlm.nih.gov/uts). - -## Building Compendia - -To run Babel, you will need to [install `uv`](https://docs.astral.sh/uv/getting-started/installation/). -`uv` manages the Python environment and installs dependencies for you. - -Compendia building is managed by snakemake. To build, for example, the anatomy related compendia, run - -```uv run snakemake --cores 1 anatomy``` - -Currently, the following targets build compendia and synonym files: -* anatomy -* chemical -* disease -* gene -* genefamily -* protein -* macromolecular_complex -* taxon -* process -* publications - -And these two build conflations: -* geneprotein -* drugchemical - -Each target builds one or more compendia corresponding to a biolink model category. For instance, the anatomy target -builds compendia for `biolink:AnatomicalEntity`, `biolink:Cell`, `biolink:CellularComponent`, and `biolink:GrossAnatomicalStructure`. - -You can also just run: - -```uv run snakemake --cores 1``` - -without a target to create all the files that are produced as part of Babel, including all reports and -alternate exports. - -If you have multiple CPUs available, you can increase the number of `--cores` to run multiple steps in parallel. - -## Build Process - -The information contained here is not required to create the compendia, but may be useful to understand. The build process is -divided into two parts: - -1. Pulling data from external sources and parsing it independent of use. -2. 
Extracting and combining entities for specific types from these downloaded data sets. - -This distinction is made because a single data set, such as MeSH or UMLS may contain entities of many different types and may be -used by many downstream targets. - -### Pulling Data - -The datacollection snakemake file coordinates pulling data from external sources into a local filesystem. Each data source -has a module in `src/datahandlers`. Data goes into the `babel_downloads` directory, in subdirectories named by the curie prefix -for that data set. If the directory is misnamed and does not match the prefix, then labels will not be added to the identifiers -in the final compendium. - -Once data is assembled, we attempt to create two extra files for each data source: `labels` and `synonyms`. `labels` is -a two-column tab-delimited file. The first column is a CURIE identifier from the data source, and the second column is the -label from that data set. Each entity should only appear once in the `labels` file. The `labels` file for a data set -does not subset the data for a specific purpose, but contains all labels for any entity in that data set. - -`synonyms` contains other lexical names for the entity and is a 3-column tab-delimited file, with the second column -indicating the type of synonym (exact, related, xref, etc.) - -### Creating compendia - -The individual details of creating a compendium vary, but all follow the same essential pattern. - -First, we extract the identifiers that will be used in the compendia from each data source that will contribute, and -place them into a directory. For instance, in the build of the chemical compendium, these ids are placed into -`/babel_downloads/chemical/ids`. Each file is a two-column file containing curie identifiers in column 1, and the -Biolink type for that entity in column 2. - -Second, we create pairwise concords across vocabularies. These are placed in e.g. `babel_downloads/chemical/concords`. 
-Each concord is a three-column file of the format: - -` ` - -While the relation is currently unused, future versions of Babel may use the relation in building cliques. - -Third, the compendia is built by bringing together the ids and concords, pulling in the categories from the id files, -and the labels from the label files. - -Fourth, the compendia is assessed to make sure that all the ids in the id files made into one of the possibly multiple -compendia. The compendia are further assessed to locate large cliques and display the level of vocabulary merging. - -## Building with Docker - -You can build this repository by running the following Docker command: - -``` -$ docker build . -``` - -It is also set up with a GitHub Action that will automatically generate and publish -Docker images to https://github.com/TranslatorSRI/Babel/pkgs/container/babel. - -**Known issue**: if you want to use `git fetch` from this Docker image, you need -to manually remote the Basic authentication command from `.git/config` before it -will work. We're tracking this at https://github.com/TranslatorSRI/Babel/issues/119. - -## Running with Docker - -You can also run Babel with [Docker](https://www.docker.com/). There are -two directories you need to bind or mount from outside the container: - -``` -$ docker run -it --rm --mount type=bind,source=...,target=/home/runner/babel/babel_downloads --entrypoint /bin/bash ggvaidya/babel -``` - -The download directory (`babel/babel_downloads`) is used to store data files downloaded during Babel assembly. - -The script `scripts/build-babel.sh` can be used to run `snakemake` with a few useful settings (although just running -`uv run snakemake --cores 5` should work just fine.) - -## Deploying with Kubernetes - -The `kubernetes/` directory has example Kubernetes scripts for deploying Babel to a Kubernetes cluster. 
You need to -create three resources: -* `kubernetes/babel-downloads.k8s.yaml` creates a Persistent Volume Claim (PVC) for downloading input resources from - the internet. -* `kubernetes/babel-outputs.k8s.yaml` creates a PVC for storing the output files generated by Babel. This includes - compendia, synonym files, reports and intermediate files. -* `kubernetes/babel.k8s.yaml` creates a pod running the latest Docker image from ggvaidya/babel. Rather than running - the data generation automatically, you are expected to SSH into this pod and start the build process by: - 1. Edit the script `scripts/babel-build.sh` to clear the `DRY_RUN` property so that it doesn't , i.e.: - ```shell - export DRY_RUN= - ``` - 2. Creating a [screen](https://www.gnu.org/software/screen/) to run the program in. You can start a Screen by - running: - - ```shell - $ screen - ``` - 3. Starting the Babel build process by running: - - ```shell - $ bash scripts/babel-build.sh - ``` - - Ideally, this should produce the entire Babel output in a single run. You can also add `--rerun-incomplete` if you - need to restart a partially completed job. - - To help with debugging, the Babel image includes .git information. You can switch branches, or fetch new branches - from GitHub by running `git fetch origin-https`. - - 4. Press `Ctrl+A D` to "detach" the screen. You can reconnect to a detached screen by running `screen -r`. - You can also see a list of all running screens by running `screen -l`. - 5. Once the generation completes, all output files should be in the `babel_outputs` directory. +## Available documentation + +If you use Babel outputs, either directly or through the Node Normalization +service, keep reading! Additional documentation about some specific +Babel functionality is also available: +* [Babel data formats](./docs/DataFormats.md) +* [Conflation](./docs/Conflation.md) + +If you want to contribute to Babel, start with the [Contributing to Babel](./docs/CONTRIBUTING.md) +documentation. 
This will provide guidance on how the source code is organized, what contributions +are most useful, and how to run the tests. + +If you want to run Babel locally, start with the [Running Babel](./docs/Running.md) +documentation. Information on [deploying Babel outputs](./docs/Deployment.md) is also available. + +If you would like to cite Babel, please cite our GitHub repository (https://github.com/TranslatorSRI/Babel). +A manuscript is in progress. + +## What does Babel do? + +Babel was built as part of the [NCATS Translator project](https://ui.transltr.io/) to solve the problem +of multiple databases using different identifiers (specifically, [CURIEs](https://en.wikipedia.org/wiki/CURIE)) to +refer to the same concept, such as [CHEBI:15377 "water"](https://www.ebi.ac.uk/chebi/searchId.do?chebiId=15377) and +[PUBCHEM.COMPOUND:962 "water"](https://pubchem.ncbi.nlm.nih.gov/compound/962). Babel downloads many online +databases of identifiers and uses their cross-reference information to identify +_cliques_ of identifiers that refer to the same concept. Each clique is assigned a +type from the [Biolink Model](https://github.com/biolink/biolink-model), which determines which identifier prefixes are +allowed and the order in which the identifiers are presented. One of these identifiers +is chosen to be the _preferred identifier_ for the clique. Within Translator, this +information is made available through the [Node Normalization service](https://github.com/TranslatorSRI/NodeNormalization). + +In certain contexts, differentiating between some related cliques doesn't make sense: +for example, you might not want to differentiate between a gene and the product of that +gene, such as a protein. Babel provides different [conflations](./docs/Conflation.md) that group cliques +on the basis of various criteria: for example, the GeneProtein conflation combines a +gene with the protein that that gene encodes. 
+ +While generating these cliques, Babel also collects all the synonyms for every clique, +which can then be used by tools like [Name Resolution (NameRes)](https://github.com/TranslatorSRI/NameResolution) to provide +name-based lookup of concepts. + +## How can I access Babel cliques? + +There are several ways of accessing Babel cliques: +* You can run the Babel pipeline to generate the cliques yourself. Note that Babel + currently has very high memory requirements -- it requires around 500G of memory + in order to generate the Protein clique. Information on [running Babel](./docs/Running.md) + is available. +* The NCATS Translator project provides the [Node Normalization](https://nodenorm.transltr.io/docs) frontend to + "normalize" identifiers -- any member of a particular clique will be normalized + to the same preferred identifier, and the API will return all the secondary + identifiers, Biolink type, description and other useful information. + You can find out more about this frontend on [its GitHub repository](https://github.com/TranslatorSRI/NodeNormalization). +* The NCATS Translator project also provides the [Name Lookup (Name Resolution)](https://name-lookup.transltr.io/) + frontends for searching for concepts by labels or synonyms. You can find out more + about this frontend at [its GitHub repository](https://github.com/TranslatorSRI/NameResolution). +* Members of the Translator consortium can also request access to the [Babel outputs](./docs/BabelOutputs.md) + (in a [custom format](./docs/DataFormats.md)), + which are currently available in JSONL, [Apache Parquet](https://parquet.apache.org/) or [KGX](https://github.com/biolink/kgx) formats. + +## What is the Node Normalization service (NodeNorm)? + +The Node Normalization service, Node Normalizer or [NodeNorm](https://github.com/TranslatorSRI/NodeNormalization) is an +NCATS Translator web service to normalize identifiers by returning a single preferred identifier for any identifier +provided. 
+ +In addition to returning the preferred identifier and all the secondary identifiers for a clique, NodeNorm will also +return its Biolink type and ["information content" score](#what-are-information-content-values), and optionally any +descriptions we have for these identifiers. + +It also includes some endpoints for normalizing an entire TRAPI message and other APIs intended primarily for +Translator users. + +You can find out more about NodeNorm at its [Swagger interface](https://nodenormalization-sri.renci.org/docs) +or [in this Jupyter Notebook](https://github.com/TranslatorSRI/NodeNormalization/blob/master/documentation/NodeNormalization.ipynb). + +## What is the Name Resolution service (NameRes)? + +The Name Resolution service, Name Lookup or [NameRes](https://github.com/TranslatorSRI/NameResolution) is an +NCATS Translator web service for looking up preferred identifiers by search text. Although it is primarily +designed to be used to power NCATS Translator's autocomplete text fields, it has also been used for +named-entity linkage. + +You can find out more about NameRes at its [Swagger interface](https://name-resolution-sri.renci.org/docs) +or [in this Jupyter Notebook](https://github.com/TranslatorSRI/NameResolution/blob/master/documentation/NameResolution.ipynb). + +## What are "information content" values? + +Babel obtains information content values for over 3.8 million concepts from +[Ubergraph](https://github.com/INCATools/ubergraph?tab=readme-ov-file#graph-organization) based on the number of +terms related to the specified term as either a subclass or any existential relation. They are decimal values +that range from 0.0 (high-level broad term with many subclasses) to 100.0 (very specific term with no subclasses). + +## I've found a "split" clique: two identifiers that should be considered identical are in separate cliques. + +Please report this as an issue to the [Babel GitHub repository](https://github.com/TranslatorSRI/Babel/issues). 
+At a minimum, please include the identifiers (CURIEs) for the identifiers that should be combined. Links to +a NodeNorm instance showing the two cliques are very helpful. Evidence supporting the lumping, such as a link to an +external database that makes it clear that these identifiers refer to the same concept, are also very helpful: while we +have some ability to combine cliques manually if needed urgently for some application, we prefer to find a source of +mappings that would combine the two identifiers, allowing us to improve cliquing across Babel. + +## I've found a "lumped" clique: two identifiers that are combined in a single clique refer to different concepts. + +Please report this as an issue to the [Babel GitHub repository](https://github.com/TranslatorSRI/Babel/issues). +At a minimum, please include the identifiers (CURIEs) for the identifiers that should be split. Links to +a NodeNorm instance showing the lumped clique is very helpful. Evidence, such as a link to an external database +that makes it clear that these identifiers refer to different concepts, are also very helpful: while we have some +ability to split cliques manually if needed urgently for some application, we prefer to find the source of mappings +that incorrectly combined the two identifiers, allowing us to improve cliquing across Babel. + +## How does Babel choose a preferred identifier for a clique? + +After determining the equivalent identifiers that belong in a single clique, Babel sorts them in the order of CURIE +prefixes for that Biolink type as determined by the Biolink Model. For example, for a [biolink:SmallMolecule](https://biolink.github.io/biolink-model/SmallMolecule/#valid-id-prefixes), +any CHEBI identifiers will appear first, followed by any UNII identifiers, and so on. The first identifier in this list +is the preferred identifier for the clique. + +[Conflations](./docs/Conflation.md) are lists of identifiers that are merged in that order when that conflation is applied. 
The preferred identifier for +the clique is therefore the preferred identifier of the first clique being conflated. +* For GeneProtein conflation, the preferred identifier is a gene. +* For DrugChemical conflation, Babel uses the [following algorithm](https://github.com/NCATSTranslator/Babel/blob/f3ff2103e74bc9b6bee9483355206b32e8f9ae9b/src/createcompendia/drugchemical.py#L466-L538): + 1. We first choose an overall Biolink type for the conflated clique. To do this, we use a ["preferred Biolink type" + order](https://github.com/NCATSTranslator/Babel/blob/f3ff2103e74bc9b6bee9483355206b32e8f9ae9b/config.yaml#L32-L50) + that can be configured in [config.yaml](./config.yaml) and choose the most preferred Biolink type that is present + in the conflated clique. + 2. We then group the cliques to be conflated by the prefix of their preferred identifier, and sort them based on + the preferred prefix order for the chosen Biolink type. + 3. If there are multiple cliques with the same prefix in their preferred identifier, we use the following criteria + to sort them: + 1. A clique with a lower information content value will be sorted before those with a higher information content + or no information content at all. + 2. A clique with more identifiers is sorted before those with fewer identifiers. + 3. A clique whose preferred identifier has a smaller numerical suffix will be sorted before those with a larger + numerical suffix. + +## How does Babel choose a preferred label for a clique? + +For most Biolink types, the preferred label for a clique is the label of the preferred identifier. There is a +[`demote_labels_longer_than`](https://github.com/NCATSTranslator/Babel/blob/master/config.yaml#L437) configuration +parameter that -- if set -- will cause labels that are longer than the specified number of characters to be ignored +unless no labels shorter than that length are present. This is to avoid overly long labels when a more concise +label is available. 
+ +Biolink types that are chemicals (i.e. [biolink:ChemicalEntity](https://biolink.github.io/biolink-model/ChemicalEntity/) +and its subclasses) have a special list of [preferred name boost prefixes](https://github.com/NCATSTranslator/Babel/blob/f3ff2103e74bc9b6bee9483355206b32e8f9ae9b/config.yaml#L416-L426) +that are used to prioritize labels. This list is currently: +1. DRUGBANK +2. DrugCentral +3. CHEBI +4. MESH +5. CHEMBL.COMPOUND +6. GTOPDB +7. HMDB +8. RXCUI +9. PUBCHEM.COMPOUND + +[Conflations](./docs/Conflation.md) are lists of identifiers that are merged in that order when that conflation is applied. The preferred label for +the conflated clique is therefore the preferred label of the first clique being conflated. + +## Where do the clique descriptions come from? + +Currently, all descriptions for NodeNorm concepts come from [UberGraph](https://github.com/INCATools/ubergraph/). You +will note that descriptions are collected for every identifier within a clique, and then the description associated +with the most preferred identifier is provided for the preferred identifier. Descriptions are not included in NameRes, +but the `description` flag can be used to include any descriptions when returning cliques from NodeNorm. + +## How can I build Babel? + +Babel is difficult to build, primarily because of its inefficient memory handling -- we currently need around 500G of +memory to build the largest compendia (Protein and DrugChemical conflated information), although the smaller +compendia should be buildable with far less memory. We are working on reducing these restrictions as far as possible. +You can read more about [Babel's build process](./docs/Running.md), and please do contact us if you run +into any problems or would like some assistance. + +## Who should I contact for more information about Babel? 
+ +You can find out more about Babel by [opening an issue on this repository](https://github.com/TranslatorSRI/Babel/issues), +contacting one of the [Translator SRI PIs](https://ncats.nih.gov/research/research-activities/translator/projects) or +contacting the [NCATS Translator team](https://ncats.nih.gov/research/research-activities/translator/about). \ No newline at end of file diff --git a/docs/BabelOutputs.md b/docs/BabelOutputs.md new file mode 100644 index 00000000..a260c033 --- /dev/null +++ b/docs/BabelOutputs.md @@ -0,0 +1,4 @@ +# Babel Outputs + +Babel outputs are currently generated solely for the NCATS Translator project. When we make general-purpose Babel outputs +available, we will provide instructions on accessing them here. \ No newline at end of file diff --git a/docs/Deployment.md b/docs/Deployment.md new file mode 100644 index 00000000..06acaf30 --- /dev/null +++ b/docs/Deployment.md @@ -0,0 +1,50 @@ +# Release information for Babel, NodeNorm and NameRes + +There are two main installations of NodeNorm that would be of interest +to users who aren't system administrators for these tools: + +* ITRB Production + * NodeNorm: https://nodenorm.transltr.io/docs + * NameRes: https://name-lookup.transltr.io/docs +* RENCI Development + * NodeNorm: https://nodenormalization-sri.renci.org/docs + * NameRes: https://name-resolution-sri.renci.org/docs + +## Release process and checkpoints +1. Create a new Babel release (see README.md for information). +2. Store the Babel outputs alongside other Babel releases on Hatteras. +3. Start validating the Babel release (see [Babel Validation] for information). + 1. Start a `validate` run that loads all the input files and generates count information. + 2. Start a `diff` run that compares this release with the previous Babel release. +4. Deploy a new NodeNorm instance + 1. Split the Babel outputs into smaller files to improve load times and put them on a public web server. + 2. 
Update the Translator-devops repo with the URL to these Babel output files. + 3. Create an [redis r3 external] instance to store identifiers. + 4. Run the [NodeNorm loader] to load the Babel outputs into the redis r3 instance. + 5. Create a [NodeNorm web server] to share the data in the redis r3 instance. +5. Deploy a new NameRes instance + 1. Create an empty Apache Solr instance. + 2. Load it with synonym information from Babel outputs. + 3. Write out a Solr backup and store it as a tarball. + 4. Copy the Solr backup to a publicly accessible URL. + 5. Update the Translator-devops repo with the new URL. + 6. Create a NameRes instance that will download the Solr backup and start the instance with it (see [NameRes devops] for information). +6. **Check with RENCI NodeNorm users before updating RENCI NodeNorm and NameRes instances** +7. Update RENCI NodeNorm and NameRes instances. +8. Announce on Translator and RENCI channels and ask people to try it out. +9. Deploy to ITRB + 1. Use the bastion servers to delete all data from the ITRB CI Redis R3 server. + 2. Update the Translator-Devops repo and create a PR for the develop branch. Once merged, the new Babel outputs should be loaded into the ITRB CI Redis R3 server. + 3. Use the bastion servers to delete all data from the ITRB Test Redis R3 server. + 4. Ask ITRB to run the NodeNorm loader to populate the ITRB Test Redis R3 server. + 5. **Announce upcoming downtime to NodeNorm Prod.** + 6. Ask ITRB to take down NodeNorm Prod. + 7. Use the bastion servers to delete all data from the ITRB Prod Redis R3 server. + 8. Ask ITRB to run the NodeNorm loader to populate the ITRB Prod Redis R3 server. + 9. Ask ITRB to start the NodeNorm Prod instance. 
+ + [Babel Validation]: https://github.com/TranslatorSRI/babel-validation + [redis r3 external]: https://github.com/helxplatform/translator-devops/tree/3e16517d6adc41db8f2156cc747b7a5ac20ee62d/helm/redis-r3-external + [NodeNorm loader]: https://github.com/helxplatform/translator-devops/tree/3e16517d6adc41db8f2156cc747b7a5ac20ee62d/helm/node-normalization-loader + [NodeNorm web server]: https://github.com/helxplatform/translator-devops/tree/3e16517d6adc41db8f2156cc747b7a5ac20ee62d/helm/node-normalization-web-server + [NameRes devops]: https://github.com/helxplatform/translator-devops/tree/3e16517d6adc41db8f2156cc747b7a5ac20ee62d/helm/name-lookup diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index 0b6d6c8e..00000000 --- a/docs/README.md +++ /dev/null @@ -1,122 +0,0 @@ -# Babel Documentation - -This directory contains several pieces of Babel documentation. - -Both [Node Normalization (NodeNorm)](https://github.com/TranslatorSRI/NodeNormalization) and -[Name Resolution (NameRes or NameLookup)](https://github.com/TranslatorSRI/NameResolution) have their own GitHub repositories -with their own documentation, but this directory is intended to include all the basic instructions -needed to work with Babel and its tools. - -## What does Babel do? - -Babel was built as part of the [NCATS Translator project](https://ui.transltr.io/) to solve the problem -of multiple databases using different identifiers (specifically, [CURIEs](https://en.wikipedia.org/wiki/CURIE)) to -refer to the same concept, such as [CHEBI:15377 "water"](https://www.ebi.ac.uk/chebi/searchId.do?chebiId=15377) and -[PUBCHEM.COMPOUND:962 "water"](https://pubchem.ncbi.nlm.nih.gov/compound/962). Babel downloads many online -databases of identifiers and uses their cross-reference information to identify -_cliques_ of identifiers that refer to the same concept. 
Each clique is assigned a -type from the [Biolink Model](https://github.com/biolink/biolink-model), which determines which identifier prefixes are -allowed and the order in which the identifiers are presented. One of these identifiers -is chosen to be the _preferred identifier_ for the clique. Within Translator, this -information is made available through the [Node Normalization service](https://github.com/TranslatorSRI/NodeNormalization). - -In certain contexts, differentiating between some related cliques doesn't make sense: -for example, you might not want to differentiate between a gene and the product of that -gene, such as a protein. Babel provides different [conflations](./Conflation.md) that group cliques -on the basis of various criteria: for example, the GeneProtein conflation combines a -gene with the protein that that gene encodes. - -While generating these cliques, Babel also collects all the synonyms for every clique, -which can then be used by tools like [Name Resolution (NameRes)](https://github.com/TranslatorSRI/NameResolution) to provide -name-based lookup of concepts. - -## How can I access Babel cliques? - -There are several ways of accessing Babel cliques: -* You can run the Babel pipeline to generate the cliques yourself. Note that Babel - currently has very high memory requirements -- it requires around 500G of memory - in order to generate the Protein clique. The [main Babel README](../README.md) has - information on running this pipeline. -* The NCATS Translator project provides the [Node Normalization](https://nodenorm.transltr.io/docs) frontend to - "normalize" identifiers -- any member of a particular clique will be normalized - to the same preferred identifier, and the API will return all the secondary - identifiers, Biolink type, description and other useful information. - You can find out more about this frontend on [its GitHub repository](https://github.com/TranslatorSRI/NodeNormalization). 
-* The NCATS Translator project also provides the [Name Lookup (Name Resolution)](https://name-lookup.transltr.io/) - frontends for searching for concepts by labels or synonyms. You can find out more - about this frontend at [its GitHub repository](https://github.com/TranslatorSRI/NameResolution). -* Members of the Translator consortium can also request access to the [Babel outputs](./BabelOutputs.md) - (in a [custom format](./DataFormats.md)), - which are currently available in JSONL, [Apache Parquet](https://parquet.apache.org/) or [KGX](https://github.com/biolink/kgx) formats. - -## What is the Node Normalization service (NodeNorm)? - -The Node Normalization service, Node Normalizer or [NodeNorm](https://github.com/TranslatorSRI/NodeNormalization) is an -NCATS Translator web service to normalize identifiers by returning a single preferred identifier for any identifier -provided. - -In addition to returning the preferred identifier and all the secondary identifiers for a clique, NodeNorm will also -return its Biolink type and ["information content" score](#what-are-information-content-values), and optionally any -descriptions we have for these identifiers. - -It also includes some endpoints for normalizing an entire TRAPI message and other APIs intended primarily for -Translator users. - -You can find out more about NodeNorm at its [Swagger interface](https://nodenormalization-sri.renci.org/docs) -or [in this Jupyter Notebook](https://github.com/TranslatorSRI/NodeNormalization/blob/master/documentation/NodeNormalization.ipynb). - -## What is the Name Resolution service (NameRes)? - -The Name Resolution service, Name Lookup or [NameRes](https://github.com/TranslatorSRI/NameResolution) is an -NCATS Translator web service for looking up preferred identifiers by search text. Although it is primarily -designed to be used to power NCATS Translator's autocomplete text fields, it has also been used for -named-entity linkage. 
- -You can find out more about NameRes at its [Swagger interface](https://name-resolution-sri.renci.org/docs) -or [in this Jupyter Notebook](https://github.com/TranslatorSRI/NameResolution/blob/master/documentation/NameResolution.ipynb). - -## What are "information content" values? - -Babel obtains information content values for over 3.8 million concepts from -[Ubergraph](https://github.com/INCATools/ubergraph?tab=readme-ov-file#graph-organization) based on the number of -terms related to the specified term as either a subclass or any existential relation. They are decimal values -that range from 0.0 (high-level broad term with many subclasses) to 100.0 (very specific term with no subclasses). - -## I've found a "split" clique: two identifiers that should be considered identical are in separate cliques. - -Please report this as an issue to the [Babel GitHub repository](https://github.com/TranslatorSRI/Babel/issues). -At a minimum, please include the identifiers (CURIEs) for the identifiers that should be combined. Links to -a NodeNorm instance showing the two cliques are very helpful. Evidence supporting the lumping, such as a link to an -external database that makes it clear that these identifiers refer to the same concept, are also very helpful: while we -have some ability to combine cliques manually if needed urgently for some application, we prefer to find a source of -mappings that would combine the two identifiers, allowing us to improve cliquing across Babel. - -## I've found a "lumped" clique: two identifiers that are combined in a single clique refer to different concepts. - -Please report this as an issue to the [Babel GitHub repository](https://github.com/TranslatorSRI/Babel/issues). -At a minimum, please include the identifiers (CURIEs) for the identifiers that should be split. Links to -a NodeNorm instance showing the lumped clique is very helpful. 
Evidence, such as a link to an external database -that makes it clear that these identifiers refer to the same concept, are also very helpful: while we have some -ability to combine cliques manually if needed urgently for some application, we prefer to find a source of mappings -that would combine the two identifiers, allowing us to improve cliquing across Babel. - -## Where do the clique descriptions come from? - -Currently, all descriptions for NodeNorm concepts come from [UberGraph](https://github.com/INCATools/ubergraph/). You -will note that descriptions are collected for every identifier within a clique, and then the description associated -with the most preferred identifier is provided for the preferred identifier. Descriptions are not included in NameRes, -but the `description` flag can be used to include any descriptions when returning cliques from NodeNorm. - -## How can I build Babel? - -Babel is difficult to build, primarily because of its inefficient memory handling -- we currently need around 500G of -memory to build the largest compendia (Protein and DrugChemical conflated information), although the smaller -compendia should be buildable with far less memory. We are working on reducing these restrictions as far as possible. -You can read more about Babel's build process in the [main README](../README.md), and please do contact us if you run -into any problems or would like some assistance. - -## Who should I contact for more information about Babel? - -You can find out more about Babel by [opening an issue on this repository](https://github.com/TranslatorSRI/Babel/issues), -contacting one of the [Translator SRI PIs](https://ncats.nih.gov/research/research-activities/translator/projects) or -contacting the [NCATS Translator team](https://ncats.nih.gov/research/research-activities/translator/about). 
\ No newline at end of file diff --git a/docs/Running.md b/docs/Running.md new file mode 100644 index 00000000..77b6d658 --- /dev/null +++ b/docs/Running.md @@ -0,0 +1,161 @@ +# Running Babel + +## Configuration + +The [`./kubernetes`](./kubernetes/README.md) directory contains Kubernetes manifest files +that can be used to set up a Pod to run Babel in. They'll give you an idea of the disk +space and memory requirements needed to run this pipeline. + +Before running, read through `config.yaml` and make sure that the settings look correct. +You will need to update the version numbers of some databases that need to be downloaded, +or change the download and output directories. + +A UMLS API key is required in order to download UMLS and RxNorm databases. You will need +to set the `UMLS_API_KEY` environmental variable to a UMLS API key, which you can obtain +by creating a profile on the [UMLS Terminology Services website](https://uts.nlm.nih.gov/uts). + +## Building Compendia + +To run Babel, you will need to [install `uv`](https://docs.astral.sh/uv/getting-started/installation/). +`uv` manages the Python environment and installs dependencies for you. + +Compendia building is managed by snakemake. To build, for example, the anatomy related compendia, run + +```uv run snakemake --cores 1 anatomy``` + +Currently, the following targets build compendia and synonym files: +* anatomy +* chemical +* disease +* gene +* genefamily +* protein +* macromolecular_complex +* taxon +* process +* publications + +And these two build conflations: +* geneprotein +* drugchemical + +Each target builds one or more compendia corresponding to a biolink model category. For instance, the anatomy target +builds compendia for `biolink:AnatomicalEntity`, `biolink:Cell`, `biolink:CellularComponent`, and `biolink:GrossAnatomicalStructure`. 
+ +You can also just run: + +```uv run snakemake --cores 1``` + +without a target to create all the files that are produced as part of Babel, including all reports and +alternate exports. + +If you have multiple CPUs available, you can increase the number of `--cores` to run multiple steps in parallel. + +## Build Process + +The information contained here is not required to create the compendia, but may be useful to understand. The build process is +divided into two parts: + +1. Pulling data from external sources and parsing it independent of use. +2. Extracting and combining entities for specific types from these downloaded data sets. + +This distinction is made because a single data set, such as MeSH or UMLS may contain entities of many different types and may be +used by many downstream targets. + +### Pulling Data + +The datacollection snakemake file coordinates pulling data from external sources into a local filesystem. Each data source +has a module in `src/datahandlers`. Data goes into the `babel_downloads` directory, in subdirectories named by the curie prefix +for that data set. If the directory is misnamed and does not match the prefix, then labels will not be added to the identifiers +in the final compendium. + +Once data is assembled, we attempt to create two extra files for each data source: `labels` and `synonyms`. `labels` is +a two-column tab-delimited file. The first column is a CURIE identifier from the data source, and the second column is the +label from that data set. Each entity should only appear once in the `labels` file. The `labels` file for a data set +does not subset the data for a specific purpose, but contains all labels for any entity in that data set. + +`synonyms` contains other lexical names for the entity and is a 3-column tab-delimited file, with the second column +indicating the type of synonym (exact, related, xref, etc.) 
+ +### Creating compendia + +The individual details of creating a compendium vary, but all follow the same essential pattern. + +First, we extract the identifiers that will be used in the compendia from each data source that will contribute, and +place them into a directory. For instance, in the build of the chemical compendium, these ids are placed into +`/babel_downloads/chemical/ids`. Each file is a two-column file containing curie identifiers in column 1, and the +Biolink type for that entity in column 2. + +Second, we create pairwise concords across vocabularies. These are placed in e.g. `babel_downloads/chemical/concords`. +Each concord is a three-column file of the format: + +`<id1> <relation> <id2>` + +While the relation is currently unused, future versions of Babel may use the relation in building cliques. + +Third, the compendia are built by bringing together the ids and concords, pulling in the categories from the id files, +and the labels from the label files. + +Fourth, the compendia are assessed to make sure that all the ids in the id files made it into one of the possibly multiple +compendia. The compendia are further assessed to locate large cliques and display the level of vocabulary merging. + +## Building with Docker + +You can build this repository by running the following Docker command: + +``` +$ docker build . +``` + +It is also set up with a GitHub Action that will automatically generate and publish +Docker images to https://github.com/NCATSTranslator/Babel/pkgs/container/babel. + +## Running with Docker + +You can also run Babel with [Docker](https://www.docker.com/). There are +two directories you need to bind or mount from outside the container: + +``` +$ docker run -it --rm --mount type=bind,source=...,target=/home/runner/babel/babel_downloads --entrypoint /bin/bash ggvaidya/babel +``` + +The download directory (`babel/babel_downloads`) is used to store data files downloaded during Babel assembly.
+ +The script `scripts/build-babel.sh` can be used to run `snakemake` with a few useful settings (although just running +`uv run snakemake --cores 5` should work just fine.) + +## Running with Kubernetes + +The `kubernetes/` directory has example Kubernetes scripts for deploying Babel to a Kubernetes cluster. You need to +create three resources: +* `kubernetes/babel-downloads.k8s.yaml` creates a Persistent Volume Claim (PVC) for downloading input resources from + the internet. +* `kubernetes/babel-outputs.k8s.yaml` creates a PVC for storing the output files generated by Babel. This includes + compendia, synonym files, reports and intermediate files. +* `kubernetes/babel.k8s.yaml` creates a pod running the latest Docker image from ggvaidya/babel. Rather than running + the data generation automatically, you are expected to SSH into this pod and start the build process by: + 1. Edit the script `scripts/babel-build.sh` to clear the `DRY_RUN` property so that it doesn't do a dry run, i.e.: + ```shell + export DRY_RUN= + ``` + 2. Creating a [screen](https://www.gnu.org/software/screen/) to run the program in. You can start a Screen by + running: + + ```shell + $ screen + ``` + 3. Starting the Babel build process by running: + + ```shell + $ bash scripts/babel-build.sh + ``` + + Ideally, this should produce the entire Babel output in a single run. You can also add `--rerun-incomplete` if you + need to restart a partially completed job. + + To help with debugging, the Babel image includes .git information. You can switch branches, or fetch new branches + from GitHub by running `git fetch origin-https`. + + 4. Press `Ctrl+A D` to "detach" the screen. You can reconnect to a detached screen by running `screen -r`. + You can also see a list of all running screens by running `screen -ls`. + 5. Once the generation completes, all output files should be in the `babel_outputs` directory.
diff --git a/pyproject.toml b/pyproject.toml index 6b6fbc88..73acf031 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ dependencies = [ "psutil>=7.1.1", "psycopg2-binary>=2.9.11", "py-spy>=0.4.1", + "pymarkdownlnt>=0.9.33", "pyoxigraph~=0.4.11", "pytest>=8.4.2", "pytest-cov>=7.0.0", @@ -56,4 +57,4 @@ line-length = 160 [tool.snakefmt] line_length = 160 -include = '\.snakefile$|^Snakefile' \ No newline at end of file +include = '\.snakefile$|^Snakefile' diff --git a/uv.lock b/uv.lock index 66478c4b..7dc8f46b 100644 --- a/uv.lock +++ b/uv.lock @@ -163,6 +163,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566, upload-time = "2020-05-11T07:59:49.499Z" }, ] +[[package]] +name = "application-properties" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyjson5" }, + { name = "pyyaml" }, + { name = "tomli" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/95/86e4c6faea022a96a7d15de1aca384e7a32400539338cc1d22fa72f0371c/application_properties-0.9.0.tar.gz", hash = "sha256:98a623210f82c2ca3911b19ba00bddedf15a84133ad8aad03b317e9e1ce56666", size = 36441, upload-time = "2025-07-02T02:06:44.948Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/4c/18c89dabeaa60ebabffe53375aa3b9853ef10c47fdb3dfa979b5dbbfe4f7/application_properties-0.9.0-py3-none-any.whl", hash = "sha256:2f3d4cba46c4807c0dad5df632c379f1676d2c3b1a45a962f4f4527ce2713c97", size = 22433, upload-time = "2025-07-02T02:06:43.781Z" }, +] + [[package]] name = "apybiomart" version = "0.5.3" @@ -234,6 +249,7 @@ dependencies = [ { name = "psutil" }, { name = "psycopg2-binary" }, { name = "py-spy" }, + { name = "pymarkdownlnt" }, { name = "pyoxigraph" }, { name = "pytest" }, { name = 
"pytest-cov" }, @@ -268,6 +284,7 @@ requires-dist = [ { name = "psutil", specifier = ">=7.1.1" }, { name = "psycopg2-binary", specifier = ">=2.9.11" }, { name = "py-spy", specifier = ">=0.4.1" }, + { name = "pymarkdownlnt", specifier = ">=0.9.33" }, { name = "pyoxigraph", specifier = "~=0.4.11" }, { name = "pytest", specifier = ">=8.4.2" }, { name = "pytest-cov", specifier = ">=7.0.0" }, @@ -523,6 +540,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] +[[package]] +name = "columnar" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "toolz" }, + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/0d/a0b2fd781050d29c9df64ac6df30b5f18b775724b79779f56fc5a8298fe9/Columnar-1.4.1.tar.gz", hash = "sha256:c3cb57273333b2ff9cfaafc86f09307419330c97faa88dcfe23df05e6fbb9c72", size = 11386, upload-time = "2021-12-27T21:58:56.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/00/a17a5657bf090b9dffdb310ac273c553a38f9252f60224da9fe62d9b60e9/Columnar-1.4.1-py3-none-any.whl", hash = "sha256:8efb692a7e6ca07dcc8f4ea889960421331a5dffa8e5af81f0a67ad8ea1fc798", size = 11845, upload-time = "2021-12-27T21:58:54.388Z" }, +] + [[package]] name = "conda-inject" version = "1.3.2" @@ -956,6 +986,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, { url = 
"https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, { url = 
"https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, @@ -965,6 +997,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" }, { url = 
"https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, @@ -974,6 +1008,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, @@ -981,6 +1017,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, { url = 
"https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681", size = 1680760, upload-time = "2025-11-04T12:42:25.341Z" }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, ] @@ -2243,8 +2281,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, { url = 
"https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" }, { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, { url = 
"https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, @@ -2252,8 +2292,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" 
}, + { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, { url = 
"https://files.pythonhosted.org/packages/ff/a8/a2709681b3ac11b0b1786def10006b8995125ba268c9a54bea6f5ae8bd3e/psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c", size = 3756572, upload-time = "2025-10-10T11:12:32.873Z" }, @@ -2261,8 +2303,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/11/32/b2ffe8f3853c181e88f0a157c5fb4e383102238d73c52ac6d93a5c8bffe6/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0", size = 4411242, upload-time = "2025-10-10T11:12:42.388Z" }, { url = "https://files.pythonhosted.org/packages/10/04/6ca7477e6160ae258dc96f67c371157776564679aefd247b66f4661501a2/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766", size = 4468258, upload-time = "2025-10-10T11:12:48.654Z" }, { url = "https://files.pythonhosted.org/packages/3c/7e/6a1a38f86412df101435809f225d57c1a021307dd0689f7a5e7fe83588b1/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3", size = 4166295, upload-time = "2025-10-10T11:12:52.525Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7d/c07374c501b45f3579a9eb761cbf2604ddef3d96ad48679112c2c5aa9c25/psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f", size = 3983133, upload-time = "2025-10-30T02:55:24.329Z" }, { url = "https://files.pythonhosted.org/packages/82/56/993b7104cb8345ad7d4516538ccf8f0d0ac640b1ebd8c754a7b024e76878/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4", size = 3652383, 
upload-time = "2025-10-10T11:12:56.387Z" }, { url = "https://files.pythonhosted.org/packages/2d/ac/eaeb6029362fd8d454a27374d84c6866c82c33bfc24587b4face5a8e43ef/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c", size = 3298168, upload-time = "2025-10-10T11:13:00.403Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/50c3facc66bded9ada5cbc0de867499a703dc6bca6be03070b4e3b65da6c/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60", size = 3044712, upload-time = "2025-10-30T02:55:27.975Z" }, { url = "https://files.pythonhosted.org/packages/9c/8e/b7de019a1f562f72ada81081a12823d3c1590bedc48d7d2559410a2763fe/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1", size = 3347549, upload-time = "2025-10-10T11:13:03.971Z" }, { url = "https://files.pythonhosted.org/packages/80/2d/1bb683f64737bbb1f86c82b7359db1eb2be4e2c0c13b947f80efefa7d3e5/psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa", size = 2714215, upload-time = "2025-10-10T11:13:07.14Z" }, { url = "https://files.pythonhosted.org/packages/64/12/93ef0098590cf51d9732b4f139533732565704f45bdc1ffa741b7c95fb54/psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1", size = 3756567, upload-time = "2025-10-10T11:13:11.885Z" }, @@ -2270,8 +2314,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/13/1e/98874ce72fd29cbde93209977b196a2edae03f8490d1bd8158e7f1daf3a0/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5", size = 4411646, upload-time = 
"2025-10-10T11:13:24.432Z" }, { url = "https://files.pythonhosted.org/packages/5a/bd/a335ce6645334fb8d758cc358810defca14a1d19ffbc8a10bd38a2328565/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8", size = 4468701, upload-time = "2025-10-10T11:13:29.266Z" }, { url = "https://files.pythonhosted.org/packages/44/d6/c8b4f53f34e295e45709b7568bf9b9407a612ea30387d35eb9fa84f269b4/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c", size = 4166293, upload-time = "2025-10-10T11:13:33.336Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e0/f8cc36eadd1b716ab36bb290618a3292e009867e5c97ce4aba908cb99644/psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f", size = 3983184, upload-time = "2025-10-30T02:55:32.483Z" }, { url = "https://files.pythonhosted.org/packages/53/3e/2a8fe18a4e61cfb3417da67b6318e12691772c0696d79434184a511906dc/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747", size = 3652650, upload-time = "2025-10-10T11:13:38.181Z" }, { url = "https://files.pythonhosted.org/packages/76/36/03801461b31b29fe58d228c24388f999fe814dfc302856e0d17f97d7c54d/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f", size = 3298663, upload-time = "2025-10-10T11:13:44.878Z" }, + { url = "https://files.pythonhosted.org/packages/97/77/21b0ea2e1a73aa5fa9222b2a6b8ba325c43c3a8d54272839c991f2345656/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b", size = 3044737, upload-time = 
"2025-10-30T02:55:35.69Z" }, { url = "https://files.pythonhosted.org/packages/67/69/f36abe5f118c1dca6d3726ceae164b9356985805480731ac6712a63f24f0/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d", size = 3347643, upload-time = "2025-10-10T11:13:53.499Z" }, { url = "https://files.pythonhosted.org/packages/e1/36/9c0c326fe3a4227953dfb29f5d0c8ae3b8eb8c1cd2967aa569f50cb3c61f/psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316", size = 2803913, upload-time = "2025-10-10T11:13:57.058Z" }, ] @@ -2430,6 +2476,128 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/96/ee/370c3b1908327dac967841ff723db391a02f3637c95c6898160e5ffe1060/PyJSG-0.11.10-py3-none-any.whl", hash = "sha256:10af60ff42219be7e85bf7f11c19b648715b0b29eb2ddbd269e87069a7c3f26d", size = 80763, upload-time = "2022-04-14T17:18:23.169Z" }, ] +[[package]] +name = "pyjson5" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/d9/005aaaf5077cde946282b22da9404965477fb140fa6836b52d2e0955a391/pyjson5-2.0.0.tar.gz", hash = "sha256:7ccc98586cf87dfeadfa76de8df4c9cb0c3d21d1b559e28812dd9633748d6e25", size = 305865, upload-time = "2025-10-02T00:23:02.154Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/2b/2cb73dba9ffeabd91d67577f5fc7fa67040eae6876c632214145893844da/pyjson5-2.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1e6870507311765249f6cb533d8d53e0c9b1837f2e1b0b2ba7825181bd980a48", size = 299572, upload-time = "2025-10-02T00:19:18.448Z" }, + { url = "https://files.pythonhosted.org/packages/e1/1b/ebf7d13d57fffccb2d5b7bbf609800ccf8ff09678a8a7ae6c0764b04b1c8/pyjson5-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68f931934d736bfd0d9d9c666b9495a10807821c44a7c58069b2f6a12ceb47ae", size = 157385, upload-time = 
"2025-10-02T00:19:19.905Z" }, + { url = "https://files.pythonhosted.org/packages/29/7c/eb6fcb6e94075bea4ab56c50d1bfb8a66d43fdc2fb67001181928dd7ddb1/pyjson5-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47eda4d30024bfa8074a6f17145e55e60cf74a43215db99685fe6998cd0130aa", size = 151838, upload-time = "2025-10-02T00:19:20.91Z" }, + { url = "https://files.pythonhosted.org/packages/c2/7c/478456b8683bc3d964cf1ca060188b1d4bc03d01548d1449d033542aee91/pyjson5-2.0.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5cc27c08bf33c7be0622ada8ff6dc7aa203287d1e585e343e90d2f2c19f31977", size = 192102, upload-time = "2025-10-02T00:19:22.238Z" }, + { url = "https://files.pythonhosted.org/packages/0f/17/bce2b2641aa140c761807d50cf30a7e09c53d0bd8737bf63dada0e8613f4/pyjson5-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3af072ff4cb1046c17d6108fd01f4c39519c95e8aa5b2084fd6fea57379eafc", size = 176766, upload-time = "2025-10-02T00:19:23.181Z" }, + { url = "https://files.pythonhosted.org/packages/30/61/7e51cd104e4514edd21b6e0c7e841da14ba80d3372d028b62719d8cb3f9e/pyjson5-2.0.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:eec8b9067aa041a177a07596c8e800e7616e5ad87ce97836c3489f977231dc1a", size = 170313, upload-time = "2025-10-02T00:19:24.602Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7e/2398eeffafc924809d4a708588f7f697398ca095b6c399849bfd0867780a/pyjson5-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e91417ead40a468698d0eb6654685986c03afc53290b8dd58d51f28803451506", size = 196097, upload-time = "2025-10-02T00:19:25.536Z" }, + { url = "https://files.pythonhosted.org/packages/98/70/550d7d634e46a71bf00b93ec2c4a8a7d63f9629e1a5117cbf249995c0e3a/pyjson5-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:023466521ce06f314979589fcd7fa01cdcff4d7a0cd32d1545ca0d566bca7761", size = 198474, upload-time = "2025-10-02T00:19:26.495Z" }, + { url = "https://files.pythonhosted.org/packages/0f/3c/e4ef8f3ef83254de96aba69f24fa613bc1277cf34802c6b4e82cc311121f/pyjson5-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:913488fb34610b900bef4244bf99d8585b3d1431a7ba28d9538fb6b3bc34150c", size = 186299, upload-time = "2025-10-02T00:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/e7/35/60c473c7970e4239149e7e4dcf7b10ca8712779e47be47b9d9fd076e54ea/pyjson5-2.0.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e25ca44bcb3ce47938d739c5b2bbecdefca919132b7f46a3f57a6d06a38c02f0", size = 185624, upload-time = "2025-10-02T00:19:28.532Z" }, + { url = "https://files.pythonhosted.org/packages/b5/15/876e53cc43c98ff036c05c90fa8a9ccbf704478a69ffc6efe2c9f898bf77/pyjson5-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c3192eaf57cd17c367dd1726354e992c10dfb810b4c2b87284f55f00840b2245", size = 1158339, upload-time = "2025-10-02T00:19:30.089Z" }, + { url = "https://files.pythonhosted.org/packages/82/57/2ef4f05a29c04ae1beceae2739b3428bca064f963758284b1633fccc5190/pyjson5-2.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6ecc3f3216aa9795a80836a1f206fc87c4d2d71f0d90228ff10f1f55b16f72c2", size = 1013974, upload-time = "2025-10-02T00:19:31.486Z" }, + { url = "https://files.pythonhosted.org/packages/fb/9a/28b8655c6c6715e15c20ab98574a793f6b3435badd0ddf67ba3ea1bd420c/pyjson5-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0e9f21938060afe6f6cd64e76f2f4849c22a7aa61ee794e9885b0a760064deb4", size = 1329463, upload-time = "2025-10-02T00:19:32.824Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6a/e427b03a02ff45cc7432a7d1e16066512321f643457039c2b955449f4148/pyjson5-2.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:78133797816d47780b25a1cf55e66ee9cd3e00a9e3abae66040fb975c39b1d23", size = 1255206, upload-time = "2025-10-02T00:19:34.778Z" }, + { url = "https://files.pythonhosted.org/packages/28/2b/d2b4a3137842de3ba66195fa5e3da96ac8c93790c77a8d76b1d30245b327/pyjson5-2.0.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:19d7dfb9d5d32839e1b77506c6b8023f83c72f531e2d6248400ca832efdb2349", size = 1190884, upload-time = "2025-10-02T00:19:36.24Z" }, + { url = "https://files.pythonhosted.org/packages/7c/6a/e590e10b7e9f145d0e7b02fde0b0b3ffec45998fc7d454e5c64f98aff6d5/pyjson5-2.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:95012228c10803512f515e70af06ec11a8621ce3742226ba507ebf6e91020d8d", size = 1367769, upload-time = "2025-10-02T00:19:37.74Z" }, + { url = "https://files.pythonhosted.org/packages/df/3b/705305653470ef31f177ba8f70df6d5d85858e2f2bf3df7624a1c454b7cb/pyjson5-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4facf0fc1bcdd7d57308bbc3dfa2ad0498c8e4d76672c35f1e7976f02d3f7df8", size = 1219001, upload-time = "2025-10-02T00:19:39.586Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b9/1a072afcfba936d3dabc9db08d18ca3aec03b82002be7b1d6d781019b317/pyjson5-2.0.0-cp311-cp311-win32.whl", hash = "sha256:6fb1bba20ebd3a0b26bca5ee906757a9d82652ca31730d40cd921b88245ec780", size = 114294, upload-time = "2025-10-02T00:19:41.05Z" }, + { url = "https://files.pythonhosted.org/packages/28/4d/303b9ad667d7440cfd4a45c75408cb868281b60864b917b285aba668a6f3/pyjson5-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:58b17386a96a308c8295e2c2a82526aefaa33ed2abaff84a916f90484b047cc4", size = 134665, upload-time = "2025-10-02T00:19:41.972Z" }, + { url = "https://files.pythonhosted.org/packages/8a/eb/29d8d48730b1ad0a6b3762190dd2c3f43c39f4f89e20be1b8c0015b24246/pyjson5-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:4f4cde947ea68da7df8fb91b682491bcc0b916e3adb942febe866703853d8301", size = 117666, upload-time = "2025-10-02T00:19:42.861Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/25/429e6cc1b6ba7a1ce730f172d8653f16dfff991de7c1122627b5d9a7dfd6/pyjson5-2.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:dbb701b2b19ef5860a2409baf7fd576af8619fdaffa96ca37e0e8e0b2f030be8", size = 300589, upload-time = "2025-10-02T00:19:44.285Z" }, + { url = "https://files.pythonhosted.org/packages/1f/58/251cc5bfcced1f18dbe36ad54b25f376ab47e8a4bcd6239c7bd69b86218e/pyjson5-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c0f29297836f4a4f8090f5bfc7b0e2b70af235c8dcfd9476a159814f734441d3", size = 159389, upload-time = "2025-10-02T00:19:45.39Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4b/4e69ccbf34f2f303e32dc0dc8853d82282f109ba41b7a9366d518751e500/pyjson5-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76d4c8d8bf56696c5b9bc3b18f51c840499e7b485817ddba89ae399fcc25c923", size = 150788, upload-time = "2025-10-02T00:19:46.454Z" }, + { url = "https://files.pythonhosted.org/packages/49/67/caa7dd84ab554d83bb68a7a27f09ed750681cd305d13feb38c2df90ccdbe/pyjson5-2.0.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e94e1a05c8a42a4828a50c520eb2330fe5732d5d04f3ebe771680f7db16f7df3", size = 188298, upload-time = "2025-10-02T00:19:47.456Z" }, + { url = "https://files.pythonhosted.org/packages/ba/39/26fffaff9ebf720a05e2867c40e2023cebe33a41e1f511e3c1b42452fe7d/pyjson5-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ab533ccd75bfda9ffd34a818f283b481e78c5c315919c4f620f69639044bdd3", size = 168159, upload-time = "2025-10-02T00:19:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c9/f7170d4903cb1526836a458f7e4650f0ff465001b7ef7066bc4b0577e601/pyjson5-2.0.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:16e9295bf9f80fc5fb63046a0df4a3adef4e945d27f61f0f6e5db0a4f1510a15", size = 169039, upload-time = "2025-10-02T00:19:49.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/d1/b84322897a861e85528c9621372441c4db57b8af615a647a9a8223e7e00a/pyjson5-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4191eced0e77207afc2f82782ef3dbee88c38ec386da8c0af9190653e8c8557f", size = 185596, upload-time = "2025-10-02T00:19:50.5Z" }, + { url = "https://files.pythonhosted.org/packages/56/3c/fea02294217c0b93f017ddc032bbacc805e669014c784b42b5cf366d4aa1/pyjson5-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9efc441991cd31a5d1fea04d8a024649bbd9a005d7e0ec6a870670b47adf43e8", size = 187665, upload-time = "2025-10-02T00:19:51.513Z" }, + { url = "https://files.pythonhosted.org/packages/10/39/de2423e6a13fb2f44ecf068df41ff1c7368ecd8b06f728afa1fb30f4ff0a/pyjson5-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:467c5e0856152bbe539e38f126f698189f1ecc4feb5292d47ad0f20472d24b6d", size = 178950, upload-time = "2025-10-02T00:19:52.591Z" }, + { url = "https://files.pythonhosted.org/packages/d4/9c/3de848f4441b95ad5f8499f7aed9b86da1c7eee776b0e673d85703416f15/pyjson5-2.0.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a2fc21d0f59c75dd3cc0a9943fface3729a4cf2e4dfbd14a90680a97bbfe23d1", size = 175149, upload-time = "2025-10-02T00:19:53.655Z" }, + { url = "https://files.pythonhosted.org/packages/44/b8/fb33760617875852f299e06aa9cd9bbaf68d2f939189736ebf9099f4f305/pyjson5-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4887291c830dbc30528833eb8cdcc44d0531626a61ac9bac80b17df369cb33", size = 1149408, upload-time = "2025-10-02T00:19:54.885Z" }, + { url = "https://files.pythonhosted.org/packages/8c/b2/ea1806e14704b5087a637a0b126ce63376f39e3762099614bca446dc7fa4/pyjson5-2.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:4a1497408a18ddd2501b1c6bdd1dd01d69809450d145c13c42913f98dfa59d20", size = 1012047, upload-time = 
"2025-10-02T00:19:56.254Z" }, + { url = "https://files.pythonhosted.org/packages/8d/79/bbd9e037d2758b3da79a4bf02d6234e88908ad62fd6fc299144d4efe7466/pyjson5-2.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9617abb9022fcd3d1034a5e07972dc0440af3d91da86c45f81750b6c324e9bcf", size = 1324907, upload-time = "2025-10-02T00:19:57.961Z" }, + { url = "https://files.pythonhosted.org/packages/e0/5d/f984d6008fa0dcf64624eed4334c88cdae31b48d0546a17017beea6f6978/pyjson5-2.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:247a8f29e4fecdf7ff894dd3b5759a21c5336b5e3c21ba2ee31a03b52b73a98c", size = 1243097, upload-time = "2025-10-02T00:19:59.37Z" }, + { url = "https://files.pythonhosted.org/packages/14/dc/c07f02d3e5f307540f884cb9ae1c2b17849ebcbf112f81663abe8ca04511/pyjson5-2.0.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:6a464e605113b09d2f235fc6d7df8425831bbe40078fe6755b30058b8a904694", size = 1181197, upload-time = "2025-10-02T00:20:00.893Z" }, + { url = "https://files.pythonhosted.org/packages/1a/59/6cf634b199a4e71cb11cc8157d3c8c0baea1d8c89b2bea3bf83a482ac742/pyjson5-2.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d355134c9735f3eb3724f3985551203976c823909aec118f616b8da096ffd9b5", size = 1356466, upload-time = "2025-10-02T00:20:02.497Z" }, + { url = "https://files.pythonhosted.org/packages/1d/f1/ae443709da9396396545c1ecfc30fd2f69629a65e894341a72fa286f0c26/pyjson5-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c3353d214db15d6b05d941cdb2fc2e3d1c94650e5baecc6986424f20ebe76d1", size = 1211084, upload-time = "2025-10-02T00:20:03.99Z" }, + { url = "https://files.pythonhosted.org/packages/28/a7/291e4ac2890dd94f773aa7fe606ffb7b5424ad5c21d888feccb0b0fbf76b/pyjson5-2.0.0-cp312-cp312-win32.whl", hash = "sha256:9f164c973f0d6b79ed3c92a4bb5506b04c810dcf84dc48b543d968ec0acfbfc8", size = 115425, upload-time = "2025-10-02T00:20:40.058Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/cb/cf69e6e080149b8993d553c683d364e714c6646f70f55b7c135efe942366/pyjson5-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:296cb2e2c6f64dc61397bd48f04569f1532cd9062d8ebca29ed02644b298e4fc", size = 135552, upload-time = "2025-10-02T00:20:41.392Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f7/b7784d5dd52a34f23efd4118bf856877a8f15bb2a53c43c192e4dee7d10f/pyjson5-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:b36fa4a4b6f632bbc2afc4caaa16e7f585cd2345de85a439e6ce734f915b8018", size = 116874, upload-time = "2025-10-02T00:20:42.379Z" }, + { url = "https://files.pythonhosted.org/packages/74/f0/a0273fa863a96fb450336f5c8f3126cd1fefe17bd60451fd66dc58d0ab6c/pyjson5-2.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6840b70981cb838e025a9f952004c6b59655c91076067abf01317fc10681cd7b", size = 299171, upload-time = "2025-10-02T00:20:43.467Z" }, + { url = "https://files.pythonhosted.org/packages/e0/8c/402811e522cbed81f414056c1683c129127034a9f567fa707200c3c67cf7/pyjson5-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd89ea40f33d1d835493ab0fc3b7b4d7c0c40254e0ddeefde08e0e9d98aebbde", size = 158725, upload-time = "2025-10-02T00:20:44.537Z" }, + { url = "https://files.pythonhosted.org/packages/2f/00/f2392fe52b50aadf5037381a52f9eda0081be6c429d9d85b47f387ecda38/pyjson5-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dc47fe45e5c20137ac10e8f2d27985d97e67fa71410819a576fa21f181b8e94b", size = 150027, upload-time = "2025-10-02T00:20:45.54Z" }, + { url = "https://files.pythonhosted.org/packages/36/5c/e3f18bb7059e4e4992b76bf2e9d8594615361313df2fb78b4c08d441a8a3/pyjson5-2.0.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eb4e885db6fe2421735b913f43028578a30dbf9f4c86673649b52bbee91231a9", size = 187241, upload-time = "2025-10-02T00:20:46.869Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/96/1d9cf5bf5ea863d61ab977f6e9842c8519ff430dbceb58580e06deb1dd4a/pyjson5-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b56f404b77f6b6d4a53b74c4d3f989d33b33ec451d7b178dad43d2fb81204dc", size = 168678, upload-time = "2025-10-02T00:20:47.871Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f4/d0704fef397d0d28d1fc16f4577883331d46b6a2f2eb59c4cc1a364b19f9/pyjson5-2.0.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:20db35f29815572130ec8d539c2465c1e4e7c7677298d6f79216bda611577709", size = 169324, upload-time = "2025-10-02T00:20:48.829Z" }, + { url = "https://files.pythonhosted.org/packages/df/8c/84eeafe750d04016aedb24cb02959e65a42ef09de675d0dca96013baf199/pyjson5-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:445a21f0a6333f352251e7cb5a8f471ce44e7d74892558bd256e0bb889c1961e", size = 184377, upload-time = "2025-10-02T00:20:50.41Z" }, + { url = "https://files.pythonhosted.org/packages/9a/80/119b2b01ae625d06ab1d6d5b021f4988fea28cf0ce8921b83ee6f944a1ab/pyjson5-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1bbabb12147f85850ba3b6a5813a3e9cc417ac9d0a66d57af42dd714f563b51e", size = 186931, upload-time = "2025-10-02T00:20:51.642Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d3/82f366ccadbe8a250e1b810ffa4a33006f66ec287e382632765b63758835/pyjson5-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49f490d68bebfccb1aa01b612beef3abffa720c4069d82d74af8b55cf15cd214", size = 180127, upload-time = "2025-10-02T00:20:52.99Z" }, + { url = "https://files.pythonhosted.org/packages/65/e2/8b96a72e8ab2e92c3748feafcec79f3e6219bf5289e5b053da7fe7fcb3f3/pyjson5-2.0.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:06cd493d607d94e841b6a8452f33bb45f55430ff33c992b8c4b671f8bebd2a14", size = 175413, upload-time = "2025-10-02T00:20:54.552Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/ea8542d9184616bedc3c7d8d8ac32d7e82fa4e347da08744b81cbffe00e3/pyjson5-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9eea8981d20bf6c37939c013c51ea1e7c9252429b01002a51afce59081b9ae0f", size = 1150022, upload-time = "2025-10-02T00:20:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/6d/af/8b8060bb9609bf4ad0bfc6fb9f52373aada55c93880c9597e41aecc2d266/pyjson5-2.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:863a0688a090e8c0add0d769ddf51e2cd48edd1d585f34272e7b4f095593175b", size = 1011750, upload-time = "2025-10-02T00:20:57.505Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/9e49bbecc03ebc21c0b45a4f51e74c87c5250822e6bcffb8f8bcf9e800fd/pyjson5-2.0.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a4a0e0835d7a5c7b18c3333dd01940ee2d160560e50851803cfaab27cc298df3", size = 1324079, upload-time = "2025-10-02T00:20:58.882Z" }, + { url = "https://files.pythonhosted.org/packages/2f/94/951c1f531a5369d8859e42a5ac60c7dacf4d8585bb25f37ca7bdd46b9cb1/pyjson5-2.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:42f3d404367f7365325be1f1460c515d40022d41bece841d47cf00e616967308", size = 1243622, upload-time = "2025-10-02T00:21:00.452Z" }, + { url = "https://files.pythonhosted.org/packages/99/0b/edb91338101501f1ec18f003e2a8da7650409537f446c7db96d302c7870d/pyjson5-2.0.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:3765c07dc1cd5b954a3e793c73c5725bac5431b83f7c807d695d73bbf78ae431", size = 1182052, upload-time = "2025-10-02T00:21:02.139Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/54e28fd04aa27375ec4baa447fd58a894cf3cfd20c6a0dad160ee8ec115c/pyjson5-2.0.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:51d33381fc268989d6ba3b6ff44e45b634ee490fc658704d04eca59ed9f8b53d", size = 1357131, upload-time = 
"2025-10-02T00:21:03.643Z" }, + { url = "https://files.pythonhosted.org/packages/ac/1a/80b50d0fae42cf58e1a37f5b87543c445bb1781ffcc69c94cc73ed397d67/pyjson5-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9f42e70d01668ccff505de17a9358fd09b26f9de037dbc8f1476215f217d3dc1", size = 1212220, upload-time = "2025-10-02T00:21:05.044Z" }, + { url = "https://files.pythonhosted.org/packages/39/fc/44fb44d5b915fc1c871aea2947d87b4cfd77c9f6673ffdaf4e41b7365a46/pyjson5-2.0.0-cp313-cp313-win32.whl", hash = "sha256:62e02fd3a4aa7bc48d9ad04dbd22076d4c33c8161df2f72cdbd8588b8634cb5d", size = 115225, upload-time = "2025-10-02T00:21:06.277Z" }, + { url = "https://files.pythonhosted.org/packages/e9/60/d28dcdc482ed36196ee7523f47b1869f92a998777d46c80cf84ec1c8c962/pyjson5-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:5318cd5e7d130fb2532c0d295a5c914ee1ab629bc0c57b1ef625bddb272442c4", size = 135384, upload-time = "2025-10-02T00:21:07.284Z" }, + { url = "https://files.pythonhosted.org/packages/79/3e/14be4a4efa651dab867057d81b4d56b1c9d5328418ca0b1d08d5e953e8d7/pyjson5-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:b274a6c6affca4a3210359bf486940ee08dbc9875f896ab19a14e344d9bbf322", size = 116783, upload-time = "2025-10-02T00:21:08.713Z" }, + { url = "https://files.pythonhosted.org/packages/79/25/4a81e6d5611b38806e8f87a5b1cf4cbac21b9781c1cbba02c8e43ebd9664/pyjson5-2.0.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:6ae6b65bc5a45e853b462d840fc32be1df4dab8dbd48b1ff3078b8dac2df2f09", size = 301159, upload-time = "2025-10-02T00:21:09.745Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f4/8c948e8a8b1a518fe87a114df1d58ab5f80b55b6601b64f8649438293bfd/pyjson5-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6b24990927f723c2fff183ec7e14507f8ae3ce22743ac312aa9bf1327f9153dd", size = 159730, upload-time = "2025-10-02T00:21:11.946Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/1b/9cd7acea4c0e5a4ed44a79b99fc7e3a50b69639ea9f926efc35d660bef04/pyjson5-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a84949318c52844ced26622a733ca54215ccfa9ee87eb38f1c92ee1ed5994827", size = 151029, upload-time = "2025-10-02T00:21:12.953Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ff/136636d1ab42f98c55011d2b25a45b3f1107bef10248506d6bf549c8eabd/pyjson5-2.0.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:10fa949fd41e8583170e2b8404c026d8e088d370428b87270a3a8df5a09ffac5", size = 187718, upload-time = "2025-10-02T00:21:14.225Z" }, + { url = "https://files.pythonhosted.org/packages/e0/97/e104682432b02f1458de22478d2b62caa607426e8284bec4680a3537cadd/pyjson5-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ccbc7a0cf1d9b8c0851b84601650ce9772e526a1a444633be6827aa162c20b54", size = 171291, upload-time = "2025-10-02T00:21:15.322Z" }, + { url = "https://files.pythonhosted.org/packages/a2/91/bf4eacd990f93f8b5afe717f915ed248595261fcfb47e7718e17c55f5069/pyjson5-2.0.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4e193346ab7c49605be4ec240c81d91014a276a163d5bba67eb53e64f425cecf", size = 168555, upload-time = "2025-10-02T00:21:16.519Z" }, + { url = "https://files.pythonhosted.org/packages/24/70/fc2147cade7bd91c4d3726a200ae9556bcb45e294d8c57a904f15da16eea/pyjson5-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:25e9b32e21d4928201e2c410bafd196b0a4f0034761378821e99fc80c21ed0e3", size = 185817, upload-time = "2025-10-02T00:21:17.628Z" }, + { url = "https://files.pythonhosted.org/packages/01/48/a8c396f25b53880bd06beb11ea8f63a42a6b8f9b82d42cc0cf6b0df8ca9f/pyjson5-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:63b0300e5ea302c107e518ef185c6f4ab8af49a5d4a52ed93e3e287fa8a6c69f", size = 188903, upload-time = "2025-10-02T00:21:19.058Z" }, + { url = "https://files.pythonhosted.org/packages/7c/a3/8ffe10a49652bfd769348c6eca577463c2b3938baab5e62f3896fc5da0b7/pyjson5-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:72f5b5832d2c3055be492cf9853ce7fe57b57cc5e664f1327f10211cbd1114ef", size = 180252, upload-time = "2025-10-02T00:21:20.174Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f0/801b0523f679a9bd5356210be9a9b074fc14e0e969f2ed1f789cf6af3c45/pyjson5-2.0.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:da790aeb2dd88be1c94ea95b5ff4915614109e9e025df7f0936dadc01ae21e0b", size = 175965, upload-time = "2025-10-02T00:21:21.252Z" }, + { url = "https://files.pythonhosted.org/packages/ea/04/ab703bccebc02c31056a525b7f06c473f141dc5bf96fe314893911a7b9ad/pyjson5-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ee211f71e3d0e7550c09b407dc75d01bbe6d5ed2ac7ee6aa54f870ebe17541aa", size = 1151968, upload-time = "2025-10-02T00:21:22.982Z" }, + { url = "https://files.pythonhosted.org/packages/70/18/5c665a34ef6123d4c4f70173e30f533bbcf36ca76e3fa7c03b8400b2e34c/pyjson5-2.0.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:bf8e84ac6d58380b5fda77985f7acea5afe45bd45e24e77aca0a6912d25222fc", size = 1009858, upload-time = "2025-10-02T00:21:24.305Z" }, + { url = "https://files.pythonhosted.org/packages/f1/bb/7641ee31fedbe337f5c7ed505b8491a96a94fdcc1567b0b1b2b3633ec755/pyjson5-2.0.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f0dd8b38187d0c2e741d40b9b348328172d0c894a90457f53b22e0f470b19009", size = 1324909, upload-time = "2025-10-02T00:21:25.874Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/4cd19d65074d85ad583ff0517e3771af8dd3e87a40d6c25bdb81d38ff0b4/pyjson5-2.0.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = 
"sha256:4ac06acc8ffa5686abad2220dbbef89f99694f1f6ddb70e4ec5455bf9fd91176", size = 1245254, upload-time = "2025-10-02T00:21:27.762Z" }, + { url = "https://files.pythonhosted.org/packages/54/26/0b96502136c4e74fa508e5a129119bd2df235dfd165acb0d74043e7fe6f0/pyjson5-2.0.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:34d2700a9472817c043a18d711ee8fd7bb6270dbd4013473d9aac51cef6a7d77", size = 1182526, upload-time = "2025-10-02T00:21:29.433Z" }, + { url = "https://files.pythonhosted.org/packages/4c/34/e704bb86cd56092771589a08d1705d1e1310bdb955a752b26f483f7cd7c9/pyjson5-2.0.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:daf0e3ecf4f7888735050e1e4dc6f25f2f523706cf42de5c3f665042311db9dc", size = 1359472, upload-time = "2025-10-02T00:21:31.4Z" }, + { url = "https://files.pythonhosted.org/packages/0d/fe/d9b6e1a1e4e4d08b3f9b022e92b93abf7baab5c959296faf10aa89cf17b2/pyjson5-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93580c6dcfb3f4f189c2a8477d9bf262cbc31878cd809c118ddc6b1bb8d6f645", size = 1212271, upload-time = "2025-10-02T00:21:32.796Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0d/c4de90f7b1aecbc24bacc2ea4582e344043e8587c18596649950e877f5aa/pyjson5-2.0.0-cp314-cp314-win32.whl", hash = "sha256:dc53188059c2a73c8ddd0d17eaf970210a0ba48805e2178dfc8e71c063668d80", size = 118268, upload-time = "2025-10-02T00:22:01.555Z" }, + { url = "https://files.pythonhosted.org/packages/52/8c/1bb60288c4d480a0b51e376a17d6c4d932dc8420989d1db440e3b284aad5/pyjson5-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:36ab5b8fcf1585623d12519f55e3efddbcbba6a0072e7168b4a3f48e3d4c64bb", size = 137772, upload-time = "2025-10-02T00:22:02.577Z" }, + { url = "https://files.pythonhosted.org/packages/53/ea/c5e9e5a44b194851347698b5065df642d42852641d32da0c71626f60f3fc/pyjson5-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:371a8ee3d8c5f128f8024c5afc776b661043c2b2672de83a22ed6a4a289522f9", size = 121372, upload-time = "2025-10-02T00:22:03.666Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/13/1391b985d3cded0038816d07a5d68e9f525a2b304a258e890bb5a4e2c64a/pyjson5-2.0.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:111d4f3b384a41eae225bce1709c745c1aeafd51214bcd850469c5c34167856c", size = 322542, upload-time = "2025-10-02T00:21:33.993Z" }, + { url = "https://files.pythonhosted.org/packages/24/c9/391def485564be4700e8baaa9a67292ed64a316050f625b84ef43358fbcc/pyjson5-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:15bc0bc456d2b101c469f57d0301a9624be682302d9ded569d5976c2c3b1130e", size = 169901, upload-time = "2025-10-02T00:21:35.081Z" }, + { url = "https://files.pythonhosted.org/packages/d7/9c/2612e236a40eac86fba453dc9db1c334b4fb77ac5d1630498b0e3a0fd8d3/pyjson5-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:151ea53ec2ce1c014c58ee755d3113af80dc44cb8ca1008eabb829cd1001ea7b", size = 161759, upload-time = "2025-10-02T00:21:36.543Z" }, + { url = "https://files.pythonhosted.org/packages/42/6f/f62b823d2e52ee7ddb25761b4bc8286c08199f6d42ddd1f01e8cb48a55a0/pyjson5-2.0.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:92fb2ae9e367fc585f93573222bfa2512c6fe85703658f96adbebd8459b16d0c", size = 184972, upload-time = "2025-10-02T00:21:37.646Z" }, + { url = "https://files.pythonhosted.org/packages/02/72/2bca65d3ad6f19386fd0e350f66c7153c09173ca9a4742d4108d07e73f78/pyjson5-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a59fcaf3927277a385f17863077d474f7451b1471ddcf6acdd28c76950d4c868", size = 172446, upload-time = "2025-10-02T00:21:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/48/ec/752cf626a6caa69bf63fea4a7a47c9c57130578de502198105c3e2c5a55f/pyjson5-2.0.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10cc1d0afd26479b2643ad3a67211e98fa72aa66030bbb695bb03d34cea2f801", size = 165790, upload-time = "2025-10-02T00:21:39.752Z" }, + { url 
= "https://files.pythonhosted.org/packages/80/a6/1b41a3f87e899d7b1c48e5fb45d1d306c478708806286f113a0495c13261/pyjson5-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c69f3b28b669e26b11766a200b7d0d8bbfbd9a48735e39b9675e8fb8d6a99744", size = 188500, upload-time = "2025-10-02T00:21:40.789Z" }, + { url = "https://files.pythonhosted.org/packages/c1/da/c9769cff5ce6b1c7e4b7e169fa1191bb2b6562849069ca11f79be6ed98d1/pyjson5-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:05d08aeb21bf547e1de4749d22b5638405aca12ba866b762d25d84575d327327", size = 193060, upload-time = "2025-10-02T00:21:41.885Z" }, + { url = "https://files.pythonhosted.org/packages/31/ef/a97738263b05d91189df4e081d2331389ec95f662d26242f678b53b7d9d7/pyjson5-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:321e107c7df19d281e858bcfdbb39282b8cc1163a1e8c142b9d91af1e1db8573", size = 181832, upload-time = "2025-10-02T00:21:42.959Z" }, + { url = "https://files.pythonhosted.org/packages/f0/15/2170f05792bddace7136100c30bdf73ec54fbed7ae86eb17f42e882238ec/pyjson5-2.0.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66dceb6b83990bf81accbbc1a56897f1bb302b7da063d5eb2d756f26c4e98389", size = 178943, upload-time = "2025-10-02T00:21:44.041Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e6/a7f40e1bfa312f1987577c583b4dc1008e05f016585f0858d527e7d6e48d/pyjson5-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2de1242c168735ac589c2ca5708f95bd3d47c50f59464042316b56d77d807cae", size = 1153787, upload-time = "2025-10-02T00:21:45.727Z" }, + { url = "https://files.pythonhosted.org/packages/cc/e3/4efcc86258a63c5c8af79fd8fe06e0ff98cebcc56facf473dba3318455a3/pyjson5-2.0.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:505dd929b620886c4bcf2ba19ca842dc5606ed1ad1fe5003cc09fbd2d910b0ef", size = 1014990, upload-time = 
"2025-10-02T00:21:47.134Z" }, + { url = "https://files.pythonhosted.org/packages/e5/15/e7f1bc7aeb2c9f008a83c3e9129b4b16e1e27b2ae463efe05cfc8320ea68/pyjson5-2.0.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:48fb751c641fd03b5f002dc47a040aca9eec0a8a9bc11bc77e86dc40a6c3f10e", size = 1322761, upload-time = "2025-10-02T00:21:48.727Z" }, + { url = "https://files.pythonhosted.org/packages/37/30/d937dfcb8386841571f7eda2b78b716ece4d62a10ce9a71f9dc8e02269fe/pyjson5-2.0.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d67186c0a70308da9752202e8dcc6fcf63991d8a2aa4cfa463a587a3cbb6416c", size = 1247709, upload-time = "2025-10-02T00:21:50.485Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d6/ca54b0953f45bd89317f5069c8cb096df33c391ae2166259c273981c4884/pyjson5-2.0.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:0a9c0901313c8cf36f6f72cfc76b3ef335723fd240c869bc80a8711567573252", size = 1185323, upload-time = "2025-10-02T00:21:52.27Z" }, + { url = "https://files.pythonhosted.org/packages/46/eb/eaa0c7eef752ea2afb192ff3f15cb79fa5229ab22cf84c0b941a0671364f/pyjson5-2.0.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:918175822878b4a48949af6fa236ccb2189b6548df14077b97246b61baff2ba7", size = 1360604, upload-time = "2025-10-02T00:21:53.819Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ca/192931f334270fa941977a9beb2590d40fe460711d932b825c3882f100de/pyjson5-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7a09dac1228517792d8941718194ee5e4aa55ed604e0616938e55d75aedcb0c1", size = 1214048, upload-time = "2025-10-02T00:21:55.338Z" }, + { url = "https://files.pythonhosted.org/packages/c2/61/63bd6351bd88e7158380eabf182beb377b53c4812175db3cde82fb2ad16e/pyjson5-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:caeee4168841a4d061f0e33cd162ae45fedbe9be9ed3dbd839d76d7791858dcf", size = 138873, upload-time = "2025-10-02T00:21:56.903Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/ee/f856f8e18336a96ad7a7561dc482f776fa3c236ca278820f1ad4d7e04bba/pyjson5-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7121183c7be324bdb6e824fc047ac29ad676025506e3cdbad6def5c4af9247d4", size = 168332, upload-time = "2025-10-02T00:21:58.038Z" }, + { url = "https://files.pythonhosted.org/packages/62/9d/17ac8aacb439c79a912a57ee105bb060c6c10d40eab587928215e2022e5e/pyjson5-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:f5e151599913b0c6e3bc3e176951f48039457e8a4b14f59c1ffffb8580ab58ea", size = 127386, upload-time = "2025-10-02T00:22:00.217Z" }, +] + +[[package]] +name = "pymarkdownlnt" +version = "0.9.33" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "application-properties" }, + { name = "columnar" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/15/cb76156751bafceebba839aa3f679940dda4fadee20b8864a4d36a9b1644/pymarkdownlnt-0.9.33.tar.gz", hash = "sha256:4486eb34fed9b66d2e4e91c5b5159b5242b816a7847585aa2a94afc58cc5583d", size = 422332, upload-time = "2025-10-24T03:17:15.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/b1/04c96cbbf8a209ff4059560c2c5c3e4be23926ce397699f09e4551510703/pymarkdownlnt-0.9.33-py3-none-any.whl", hash = "sha256:c5341a6f2539d087f76c88cda2dfc0c39e7632ebae915ea9399a083030951f35", size = 505133, upload-time = "2025-10-24T03:17:14.05Z" }, +] + [[package]] name = "pyoxigraph" version = "0.4.11" @@ -3389,6 +3557,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, ] +[[package]] +name = "toolz" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/11/d6/114b492226588d6ff54579d95847662fc69196bdeec318eb45393b24c192/toolz-1.1.0.tar.gz", hash = "sha256:27a5c770d068c110d9ed9323f24f1543e83b2f300a687b7891c1a6d56b697b5b", size = 52613, upload-time = "2025-10-17T04:03:21.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl", hash = "sha256:15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8", size = 58093, upload-time = "2025-10-17T04:03:20.435Z" }, +] + [[package]] name = "tqdm" version = "4.67.1" @@ -3506,6 +3683,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] +[[package]] +name = "wcwidth" +version = "0.2.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, +] + [[package]] name = "webcolors" version = "24.11.1"