diff --git a/.github/pre-commit b/.github/pre-commit
index 0dc61a141..c7370e218 100755
--- a/.github/pre-commit
+++ b/.github/pre-commit
@@ -41,8 +41,8 @@ fi
 if ! poetry run mypy .
 then
     echo ""
-    echo "There are some python code style issues."
-    echo "Fix the warnings returned by `poetry run mypy .` first."
+    echo "There are some typing issues."
+    echo "Fix the warnings returned by 'poetry run mypy .' first."
     exit 1
 fi
 
diff --git a/.github/workflows/ci-py.yml b/.github/workflows/ci-py.yml
index fa041b0aa..5781aa51d 100644
--- a/.github/workflows/ci-py.yml
+++ b/.github/workflows/ci-py.yml
@@ -35,6 +35,7 @@ jobs:
             python:
               - 'quantinuum-hugr-py/**'
               - 'pyproject.toml'
+              - 'specification/schema/**'
 
   check:
     needs: changes
@@ -74,16 +75,54 @@ jobs:
       - name: Run tests
         run: poetry run pytest
 
+  # Ensure that the serialization schema is up to date
+  serialization-schema:
+    needs: [changes]
+    if: ${{ needs.changes.outputs.python == 'true' }}
+    name: Check serialization schema
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Run sccache-cache
+        uses: mozilla-actions/sccache-action@v0.0.3
+      - name: Install poetry
+        run: pipx install poetry
+      - name: Set up Python
+        uses: actions/setup-python@v3
+        with:
+          python-version: "3.10"
+          cache: "poetry"
+      - name: Install the project libraries
+        run: poetry install
+      - name: Generate the updated schema
+        run: |
+          poetry run python scripts/generate_schema.py specification/schema/
+      - name: Check if the schema is up to date
+        run: |
+          if ! git diff --exit-code --name-only specification/schema/
+          then
+            echo "The serialization schema is not up to date"
+            echo "Please run 'just update-schema' and commit the changes"
+            exit 1
+          fi
+
   # This is a meta job to mark successful completion of the required checks,
   # even if they are skipped due to no changes in the relevant files.
   required-checks:
     name: Required checks 🐍
-    needs: [changes, check]
-    if: always()
+    needs: [changes, check, serialization-schema]
+    if: ${{ !cancelled() }}
     runs-on: ubuntu-latest
     steps:
       - name: Fail if required checks failed
-        if: (failure() || cancelled())
+        # This condition should simply be `if: failure() || cancelled()`,
+        # but there seems to be a bug in the github workflow runner.
+        #
+        # See https://github.com/orgs/community/discussions/80788
+        if: |
+          needs.changes.result == 'failure' || needs.changes.result == 'cancelled' ||
+          needs.check.result == 'failure' || needs.check.result == 'cancelled' ||
+          needs.serialization-schema.result == 'failure' || needs.serialization-schema.result == 'cancelled'
         run: |
           echo "Required checks failed"
           echo "Please check the logs for more information"
diff --git a/.github/workflows/ci-rs.yml b/.github/workflows/ci-rs.yml
index 3f8176dac..04b4d0b48 100644
--- a/.github/workflows/ci-rs.yml
+++ b/.github/workflows/ci-rs.yml
@@ -134,11 +134,18 @@ jobs:
   required-checks:
     name: Required checks 🦀
     needs: [changes, check, tests-stable]
-    if: always()
+    if: ${{ !cancelled() }}
    runs-on: ubuntu-latest
    steps:
      - name: Fail if required checks failed
-        if: (failure() || cancelled())
+        # This condition should simply be `if: failure() || cancelled()`,
+        # but there seems to be a bug in the github workflow runner.
+        #
+        # See https://github.com/orgs/community/discussions/80788
+        if: |
+          needs.changes.result == 'failure' || needs.changes.result == 'cancelled' ||
+          needs.check.result == 'failure' || needs.check.result == 'cancelled' ||
+          needs.tests-stable.result == 'failure' || needs.tests-stable.result == 'cancelled'
         run: |
           echo "Required checks failed"
           echo "Please check the logs for more information"
diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md
index 110a4cf6e..53f0f611b 100644
--- a/DEVELOPMENT.md
+++ b/DEVELOPMENT.md
@@ -29,6 +29,8 @@ shell by setting up [direnv](https://devenv.sh/automatic-shell-activation/).
 To setup the environment manually you will need:
 
 - Rust: https://www.rust-lang.org/tools/install
+- Just: https://just.systems/
+- Poetry: https://python-poetry.org/
 
 You can use the git hook in [`.github/pre-commit`](.github/pre-commit) to automatically run the test and check formatting before committing. To install it, run:
 
@@ -44,8 +46,10 @@ ln -s .github/pre-commit .git/hooks/pre-push
 To compile and test the rust code, run:
 
 ```bash
-cargo build
-cargo test
+# Rust tests
+just test # or `cargo test`
+# Python tests
+just pytest
 ```
 
 Run the benchmarks with:
@@ -62,6 +66,8 @@ stable available.
 cargo +nightly miri test
 ```
 
+Run `just` to see all available commands.
+
 ## 💅 Coding Style
 
 The rustfmt tool is used to enforce a consistent rust coding style. The CI will fail if the code is not formatted correctly.
@@ -70,13 +76,19 @@ To format your code, run:
 
 ```bash
 # Format rust code
-cargo fmt
+just format
+```
+
+We also use various linters to catch common mistakes and enforce best practices. To run these, use:
+
+```bash
+just check
 ```
 
-We also check for clippy warnings, which are a set of linting rules for rust. To run clippy, run:
 
+To quickly fix common issues, run:
 
 ```bash
-cargo clippy --all-targets
+just fix
 ```
 
 ## 📈 Code Coverage
@@ -85,9 +97,19 @@ We run coverage checks on the CI. Once you submit a PR, you can review the
 line-by-line coverage report on
 [codecov](https://app.codecov.io/gh/CQCL/hugr/commits?branch=All%20branches).
 
-To run the coverage checks locally, install `cargo-llvm-cov`, generate the report with:
+To run the coverage checks locally, first install `cargo-llvm-cov`.
+
+```bash
+cargo install cargo-llvm-cov
+```
+
+Then run the tests:
+
 ```bash
-cargo llvm-cov --lcov > lcov.info
+# Rust test coverage
+just coverage
+# Python test coverage
+just pycoverage
 ```
 
 and open it with your favourite coverage viewer.
In VSCode, you can use diff --git a/codecov.yml b/codecov.yml index aec1d6163..15ae701a4 100644 --- a/codecov.yml +++ b/codecov.yml @@ -15,6 +15,7 @@ coverage: # Ignore tests and binaries ignore: - "quantinuum-hugr-py/tests" + - "scripts" # Coverage groups config flag_management: diff --git a/devenv.nix b/devenv.nix index 43ffbefa4..a36625cce 100644 --- a/devenv.nix +++ b/devenv.nix @@ -30,6 +30,14 @@ in export LLVM_PROFDATA="${pkgs.llvmPackages_16.libllvm}/bin/llvm-profdata" ''; + languages.python = { + enable = true; + poetry = { + enable = true; + activate.enable = true; + }; + }; + # https://devenv.sh/languages/ # https://devenv.sh/reference/options/#languagesrustversion languages.rust = { diff --git a/justfile b/justfile index 941274c20..d8a83162f 100644 --- a/justfile +++ b/justfile @@ -35,3 +35,7 @@ pytest: # Generate a python test coverage report pycoverage: poetry run pytest --cov=./ --cov-report=html + +# Update the HUGR schema +update-schema: + poetry run python scripts/generate_schema.py specification/schema/ diff --git a/poetry.lock b/poetry.lock index 6aaaf3166..767ea27a8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + [[package]] name = "colorama" version = "0.4.6" @@ -187,6 +198,116 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pydantic" +version = "2.6.4" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.3" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = 
"sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = 
"pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pytest" version = "8.1.1" @@ -236,6 +357,9 @@ python-versions = ">=3.10" files = [] develop = true +[package.dependencies] +pydantic = "^2.6.4" + [package.source] type = "directory" url = "quantinuum-hugr-py" diff --git a/quantinuum-hugr-py/pyproject.toml b/quantinuum-hugr-py/pyproject.toml index ae1b60f8f..0db399359 100644 --- a/quantinuum-hugr-py/pyproject.toml +++ b/quantinuum-hugr-py/pyproject.toml @@ -13,6 +13,7 @@ repository = "https://github.com/CQCL/hugr" [tool.poetry.dependencies] python = ">=3.10" +pydantic = "^2.6.4" [tool.pytest.ini_options] # Lark throws deprecation warnings for `src_parse` and `src_constants`. diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/serialization/__init__.py b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/__init__.py new file mode 100644 index 000000000..ad1db81e8 --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/__init__.py @@ -0,0 +1,3 @@ +from .serial_hugr import SerialHugr + +__all__ = ["SerialHugr"] diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/serialization/ops.py b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/ops.py new file mode 100644 index 000000000..2bc9824dd --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/ops.py @@ -0,0 +1,560 @@ +import inspect +import sys +from abc import ABC +from typing import Any, Literal, cast + +from pydantic import BaseModel, Field, RootModel + +from . 
import tys +from .tys import ( + ExtensionId, + ExtensionSet, + FunctionType, + PolyFuncType, + Type, + TypeRow, +) + +NodeID = int + + +class BaseOp(ABC, BaseModel): + """Base class for ops that store their node's input/output types""" + + # Parent node index of node the op belongs to, used only at serialization time + parent: NodeID + input_extensions: ExtensionSet = Field(default_factory=ExtensionSet) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + """Hook to insert type information from the input and output ports into the + op""" + + def insert_child_dfg_signature(self, inputs: TypeRow, outputs: TypeRow) -> None: + """Hook to insert type information from a child dataflow graph""" + + def display_name(self) -> str: + """Name of the op for visualisation""" + return self.__class__.__name__ + + +# ---------------------------------------------------------- +# --------------- Module level operations ------------------ +# ---------------------------------------------------------- + + +class Module(BaseOp): + """The root of a module, parent of all other `ModuleOp`s.""" + + op: Literal["Module"] = "Module" + + +class FuncDefn(BaseOp): + """A function definition. Children nodes are the body of the definition.""" + + op: Literal["FuncDefn"] = "FuncDefn" + + name: str + signature: PolyFuncType = Field(default_factory=PolyFuncType.empty) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(in_types) == 0 + assert len(out_types) == 1 + out = out_types[0] + assert isinstance(out, PolyFuncType) + self.signature = out # TODO: Extensions + + +class FuncDecl(BaseOp): + """External function declaration, linked at runtime.""" + + op: Literal["FuncDecl"] = "FuncDecl" + name: str + signature: PolyFuncType = Field(default_factory=PolyFuncType.empty) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(in_types) == 0 + assert len(out_types) == 1 + out = out_types[0] + assert isinstance(out, PolyFuncType) + self.signature = out + + +class ConstBase(BaseOp): + """A constant operation definition.""" + + op: Literal["Const"] = "Const" + + +CustomConst = Any # TODO + + +class ExtensionConst(ConstBase): + """An extension constant value, that can check it is of a given [CustomType].""" + + c: Literal["Extension"] = Field("Extension", title="ConstTag") + e: CustomConst = Field(title="CustomConst") + + class Config: + json_schema_extra = { + "required": ["parent", "op", "c", "e"], + } + + +class FunctionConst(ConstBase): + """A higher-order function value.""" + + c: Literal["Function"] = Field("Function", title="ConstTag") + hugr: Any # TODO + + class Config: + json_schema_extra = { + "required": ["parent", "op", "c", "hugr"], + } + + +class Tuple(ConstBase): + """A constant tuple value.""" + + c: Literal["Tuple"] = Field("Tuple", title="ConstTag") + vs: list["Const"] + + class Config: + json_schema_extra = { + "required": ["parent", "op", "c", "vs"], + } + + +class Sum(ConstBase): + """A Sum variant + + For any Sum type where this value meets the type of the variant indicated by the tag + """ + + c: Literal["Sum"] = Field("Sum", title="ConstTag") + tag: int + typ: Type + vs: list["Const"] + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "A Sum variant For any Sum type where this value meets the type " + "of the variant indicated by the tag." 
+ ), + "required": ["parent", "op", "c", "tag", "typ", "vs"], + } + + +class Const(RootModel): + """A constant operation.""" + + root: ExtensionConst | FunctionConst | Tuple | Sum = Field(discriminator="c") + + +# ----------------------------------------------- +# --------------- BasicBlock types ------------------ +# ----------------------------------------------- + + +class DataflowBlock(BaseOp): + """A CFG basic block node. The signature is that of the internal Dataflow + graph.""" + + op: Literal["DataflowBlock"] = "DataflowBlock" + inputs: TypeRow = Field(default_factory=list) + other_outputs: TypeRow = Field(default_factory=list) + sum_rows: list[TypeRow] = Field(default_factory=list) + extension_delta: ExtensionSet = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + num_cases = len(out_types) + self.sum_rows = [[] for _ in range(num_cases)] + + def insert_child_dfg_signature(self, inputs: TypeRow, outputs: TypeRow) -> None: + self.inputs = inputs + pred = outputs[0] + assert isinstance(pred, tys.UnitSum | tys.GeneralSum) + if isinstance(pred, tys.UnitSum): + self.sum_rows = [[] for _ in range(cast(tys.UnitSum, pred).size)] + else: + assert isinstance(pred, tys.GeneralSum) + self.sum_rows = [] + for variant in pred.rows: + assert isinstance(variant, tys.TupleType) + self.sum_rows.append(variant.inner) + self.other_outputs = outputs[1:] + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "required": [ + "parent", + "op", + "inputs", + "other_outputs", + "sum_rows", + "extension_delta", + ], + "description": "A CFG basic block node. The signature is that of the internal Dataflow graph.", + } + + +class ExitBlock(BaseOp): + """The single exit node of the CFG, has no children, stores the types of + the CFG node output.""" + + op: Literal["ExitBlock"] = "ExitBlock" + cfg_outputs: TypeRow + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": "The single exit node of the CFG, has no children, stores the types of the CFG node output." + } + + +# --------------------------------------------- +# --------------- DataflowOp ------------------ +# --------------------------------------------- + + +class DataflowOp(BaseOp): + pass + + +class Input(DataflowOp): + """An input node. The outputs of this node are the inputs to the parent node.""" + + op: Literal["Input"] = "Input" + types: TypeRow = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(in_types) == 0 + self.types = list(out_types) + + +class Output(DataflowOp): + """An output node. The inputs are the outputs of the function.""" + + op: Literal["Output"] = "Output" + types: TypeRow = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(out_types) == 0 + self.types = list(in_types) + + +class Call(DataflowOp): + """ + Call a function directly. + + The first port is connected to the def/declare of the function being called + directly, with a `ConstE` edge. The signature of the remaining ports matches + the function being called. 
+ """ + + op: Literal["Call"] = "Call" + signature: FunctionType = Field(default_factory=FunctionType.empty) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + # The constE edge comes after the value inputs + fun_ty = in_types[-1] + assert isinstance(fun_ty, PolyFuncType) + poly_func = cast(PolyFuncType, fun_ty) + assert len(poly_func.params) == 0 + self.signature = poly_func.body + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "Operation to call a function directly. The first port is " + "connected to the def/declare of the function being called directly, " + "with a `Static` edge. The signature of the remaining " + "ports matches the function being called." + ) + } + + +class CallIndirect(DataflowOp): + """Call a function indirectly. + + Like call, but the first input is a standard dataflow graph type.""" + + op: Literal["CallIndirect"] = "CallIndirect" + signature: FunctionType = Field(default_factory=FunctionType.empty) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + fun_ty = in_types[0] + assert isinstance(fun_ty, PolyFuncType) + poly_func = cast(PolyFuncType, fun_ty) + assert len(poly_func.params) == 0 + assert len(poly_func.body.input) == len(in_types) - 1 + assert len(poly_func.body.output) == len(out_types) + self.signature = poly_func.body + + +class LoadConstant(DataflowOp): + """An operation that loads a static constant in to the local dataflow graph.""" + + op: Literal["LoadConstant"] = "LoadConstant" + datatype: Type + + +class LeafOpBase(DataflowOp): + """Simple operation that has only value inputs+outputs and (potentially) StateOrder + edges.""" + + op: Literal["LeafOp"] = "LeafOp" + + +class DFG(DataflowOp): + """A simply nested dataflow graph.""" + + op: Literal["DFG"] = "DFG" + signature: FunctionType = Field(default_factory=FunctionType.empty) + + def insert_child_dfg_signature(self, inputs: TypeRow, outputs: TypeRow) -> None: + self.signature = FunctionType( + input=list(inputs), output=list(outputs), extension_reqs=ExtensionSet([]) + ) + + +# ------------------------------------------------ +# --------------- ControlFlowOp ------------------ +# ------------------------------------------------ + + +class Conditional(DataflowOp): + """Conditional operation, defined by child `Case` nodes for each branch.""" + + op: Literal["Conditional"] = "Conditional" + other_inputs: TypeRow = Field(default_factory=list) # Remaining input types + outputs: TypeRow = Field(default_factory=list) # Output types + sum_rows: list[TypeRow] = Field(description="The possible rows of the Sum input") + # Extensions used to produce the outputs + extension_delta: ExtensionSet = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + # First port is a predicate, i.e. a sum of tuple types. We need to unpack + # those into a list of type rows + pred = in_types[0] + if isinstance(pred, tys.UnitSum): + self.sum_rows = [[] for _ in range(cast(tys.UnitSum, pred).size)] + else: + assert isinstance(pred, tys.GeneralSum) + self.sum_rows = [] + for ty in pred.rows: + assert isinstance(ty, tys.TupleType) + self.sum_rows.append(ty.inner) + self.other_inputs = list(in_types[1:]) + self.outputs = list(out_types) + + +class Case(BaseOp): + """Case ops - nodes valid inside Conditional nodes.""" + + op: Literal["Case"] = "Case" + # The signature of the contained dataflow graph. 
+    signature: FunctionType = Field(default_factory=FunctionType.empty)
+
+    def insert_child_dfg_signature(self, inputs: TypeRow, outputs: TypeRow) -> None:
+        self.signature = tys.FunctionType(
+            input=list(inputs), output=list(outputs), extension_reqs=ExtensionSet([])
+        )
+
+
+class TailLoop(DataflowOp):
+    """Tail-controlled loop."""
+
+    op: Literal["TailLoop"] = "TailLoop"
+    just_inputs: TypeRow = Field(default_factory=list)  # Types that are only input
+    just_outputs: TypeRow = Field(default_factory=list)  # Types that are only output
+    # Types that are appended to both input and output:
+    rest: TypeRow = Field(default_factory=list)
+
+    def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None:
+        assert in_types == out_types
+        # self.just_inputs = list(in_types)
+        # self.just_outputs = list(out_types)
+        self.rest = list(in_types)
+
+
+class CFG(DataflowOp):
+    """A dataflow node which is defined by a child CFG."""
+
+    op: Literal["CFG"] = "CFG"
+    signature: FunctionType = Field(default_factory=FunctionType.empty)
+
+    def insert_port_types(self, inputs: TypeRow, outputs: TypeRow) -> None:
+        self.signature = FunctionType(
+            input=list(inputs), output=list(outputs), extension_reqs=ExtensionSet([])
+        )
+
+
+ControlFlowOp = Conditional | TailLoop | CFG
+
+
+# -----------------------------------------
+# --------------- LeafOp ------------------
+# -----------------------------------------
+
+
+class CustomOp(LeafOpBase):
+    """A user-defined operation that can be downcasted by the extensions that define
+    it."""
+
+    lop: Literal["CustomOp"] = "CustomOp"
+    extension: ExtensionId
+    op_name: str
+    signature: tys.FunctionType = Field(default_factory=tys.FunctionType.empty)
+    description: str = ""
+    args: list[tys.TypeArg] = Field(default_factory=list)
+
+    def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None:
+        self.signature = tys.FunctionType(input=list(in_types), output=list(out_types))
+
+    def display_name(self) -> str:
+        return self.op_name
+
+    class Config:
+        # Needed to avoid random '\n's in the pydantic description
+        json_schema_extra = {
+            "description": (
+                "A user-defined operation that can be downcasted by the extensions that "
+                "define it."
+            )
+        }
+
+
+class Noop(LeafOpBase):
+    """A no-op operation."""
+
+    lop: Literal["Noop"] = "Noop"
+    ty: Type
+
+    def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None:
+        assert len(in_types) == 1
+        assert len(out_types) == 1
+        assert in_types[0] == out_types[0]
+        self.ty = in_types[0]
+
+
+class MakeTuple(LeafOpBase):
+    """An operation that packs all its inputs into a tuple."""
+
+    lop: Literal["MakeTuple"] = "MakeTuple"
+    tys: TypeRow = Field(default_factory=list)
+
+    def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None:
+        # If we have a single order edge as input, this is a unit
+        if in_types == [None]:
+            in_types = []
+        self.tys = list(in_types)
+
+
+class UnpackTuple(LeafOpBase):
+    """An operation that unpacks a tuple into its component values."""
+
+    lop: Literal["UnpackTuple"] = "UnpackTuple"
+    tys: TypeRow = Field(default_factory=list)
+
+    def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None:
+        self.tys = list(out_types)
+
+
+class Tag(LeafOpBase):
+    """An operation that creates a tagged sum value from one of its variants."""
+
+    lop: Literal["Tag"] = "Tag"
+    tag: int  # The variant to create.
+    variants: TypeRow  # The variants of the sum type.
+ + +class TypeApply(LeafOpBase): + """Fixes some TypeParams of a polymorphic type by providing TypeArgs.""" + + lop: Literal["TypeApply"] = "TypeApply" + ta: "TypeApplication" + + +class TypeApplication(BaseModel): + """Records details of an application of a PolyFuncType to some TypeArgs and the + result (a less-, but still potentially-, polymorphic type). + """ + + input: PolyFuncType + args: list[tys.TypeTypeArg] + output: PolyFuncType + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "Records details of an application of a PolyFuncType to some TypeArgs " + "and the result (a less-, but still potentially-, polymorphic type)." + ) + } + + +class LeafOp(RootModel): + """A constant operation.""" + + root: CustomOp | Noop | MakeTuple | UnpackTuple | Tag | TypeApply = Field( + discriminator="lop" + ) + + +class OpType(RootModel): + """A constant operation.""" + + root: ( + Module + | Case + | FuncDefn + | FuncDecl + | Const + | DataflowBlock + | ExitBlock + | Conditional + | TailLoop + | CFG + | Input + | Output + | Call + | CallIndirect + | LoadConstant + | LeafOp + | DFG + ) = Field(discriminator="op") + + +# -------------------------------------- +# --------------- OpDef ---------------- +# -------------------------------------- + + +class OpDef(BaseOp, populate_by_name=True): + """Serializable definition for dynamically loaded operations.""" + + name: str # Unique identifier of the operation. + description: str # Human readable description of the operation. + inputs: list[tuple[str | None, Type]] + outputs: list[tuple[str | None, Type]] + misc: dict[str, Any] # Miscellaneous data associated with the operation. + def_: str | None = Field( + ..., alias="def" + ) # (YAML?)-encoded definition of the operation. + extension_reqs: ExtensionSet # Resources required to execute this operation. + + +# Now that all classes are defined, we need to update the ForwardRefs in all type +# annotations. We use some inspect magic to find all classes defined in this file. 
+classes = inspect.getmembers( + sys.modules[__name__], + lambda member: inspect.isclass(member) and member.__module__ == __name__, +) +for _, c in classes: + if issubclass(c, BaseModel): + c.model_rebuild() diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/serialization/serial_hugr.py b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/serial_hugr.py new file mode 100644 index 000000000..355756bce --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/serial_hugr.py @@ -0,0 +1,36 @@ +from typing import Any, Literal + +from pydantic import BaseModel + +from .ops import NodeID, OpType + +Port = tuple[NodeID, int | None] # (node, offset) +Edge = tuple[Port, Port] + + +class SerialHugr(BaseModel): + """A serializable representation of a Hugr.""" + + version: Literal["v1"] = "v1" + nodes: list[OpType] + edges: list[Edge] + + def to_json(self) -> str: + """Return a JSON representation of the Hugr.""" + return self.model_dump_json() + + @classmethod + def load_json(cls, json: dict[Any, Any]) -> "SerialHugr": + """Decode a JSON-encoded Hugr.""" + return cls(**json) + + @classmethod + def get_version(cls) -> str: + """Return the version of the schema.""" + return cls(nodes=[], edges=[]).version + + class Config: + title = "Hugr" + json_schema_extra = { + "required": ["version", "nodes", "edges"], + } diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/serialization/tys.py b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/tys.py new file mode 100644 index 000000000..effc53d18 --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/tys.py @@ -0,0 +1,327 @@ +import inspect +import sys +from enum import Enum +from typing import Annotated, Any, Literal, Optional, Union + +from pydantic import ( + BaseModel, + Field, + RootModel, + ValidationError, + ValidationInfo, + ValidatorFunctionWrapHandler, + WrapValidator, +) +from pydantic_core import PydanticCustomError + + +def _json_custom_error_validator( + value: Any, handler: ValidatorFunctionWrapHandler, _info: ValidationInfo +) -> Any: + """Simplify the error message to avoid a gross error stemming + from exhaustive checking of all union options. + + As suggested at + https://docs.pydantic.dev/latest/concepts/types/#named-recursive-types + + + Used to define named recursive alias types. 
+ """ + try: + return handler(value) + except ValidationError as err: + raise PydanticCustomError( + "invalid_json", + "Input is not valid json", + ) from err + + +ExtensionId = str + + +class ExtensionSet(RootModel): + """A set of extensions ids.""" + + root: Optional[list[ExtensionId]] = Field(default=None) + + +# -------------------------------------------- +# --------------- TypeParam ------------------ +# -------------------------------------------- + + +class TypeTypeParam(BaseModel): + tp: Literal["Type"] = "Type" + b: "TypeBound" + + +class BoundedNatParam(BaseModel): + tp: Literal["BoundedNat"] = "BoundedNat" + bound: int | None + + +class OpaqueParam(BaseModel): + tp: Literal["Opaque"] = "Opaque" + ty: "Opaque" + + +class ListParam(BaseModel): + tp: Literal["List"] = "List" + param: "TypeParam" + + +class TupleParam(BaseModel): + tp: Literal["Tuple"] = "Tuple" + params: list["TypeParam"] + + +class TypeParam(RootModel): + """A type parameter.""" + + root: Annotated[ + TypeTypeParam | BoundedNatParam | OpaqueParam | ListParam | TupleParam, + WrapValidator(_json_custom_error_validator), + ] = Field(discriminator="tp") + + +# ------------------------------------------ +# --------------- TypeArg ------------------ +# ------------------------------------------ + + +class CustomTypeArg(BaseModel): + typ: None # TODO + value: str + + +class TypeTypeArg(BaseModel): + tya: Literal["Type"] = "Type" + ty: "Type" + + +class BoundedNatArg(BaseModel): + tya: Literal["BoundedNat"] = "BoundedNat" + n: int + + +class OpaqueArg(BaseModel): + tya: Literal["Opaque"] = "Opaque" + arg: CustomTypeArg + + +class SequenceArg(BaseModel): + tya: Literal["Sequence"] = "Sequence" + args: list["TypeArg"] + + +class ExtensionsArg(BaseModel): + tya: Literal["Extensions"] = "Extensions" + es: ExtensionSet + + +class TypeArg(RootModel): + """A type argument.""" + + root: Annotated[ + TypeTypeArg | BoundedNatArg | OpaqueArg | SequenceArg | ExtensionsArg, + WrapValidator(_json_custom_error_validator), + ] = Field(discriminator="tya") + + +# -------------------------------------------- +# --------------- Container ------------------ +# -------------------------------------------- + + +class MultiContainer(BaseModel): + ty: "Type" + + +class Array(MultiContainer): + """Known size array whose elements are of the same type.""" + + t: Literal["Array"] = "Array" + len: int + + +class TupleType(BaseModel): + """Product type, known-size tuple over elements of type row.""" + + t: Literal["Tuple"] = "Tuple" + inner: "TypeRow" + + +class UnitSum(BaseModel): + """Simple predicate where all variants are empty tuples.""" + + t: Literal["Sum"] = "Sum" + + s: Literal["Unit"] = "Unit" + size: int + + +class GeneralSum(BaseModel): + """General sum type that explicitly stores the types of the variants.""" + + t: Literal["Sum"] = "Sum" + + s: Literal["General"] = "General" + rows: list["TypeRow"] + + +class SumType(RootModel): + root: Union[UnitSum, GeneralSum] = Field(discriminator="s") + + +# ---------------------------------------------- +# --------------- ClassicType ------------------ +# ---------------------------------------------- + + +class Variable(BaseModel): + """A type variable identified by a de Bruijn index.""" + + t: Literal["V"] = "V" + i: int + b: "TypeBound" + + +class USize(BaseModel): + """Unsigned integer size type.""" + + t: Literal["I"] = "I" + + +class FunctionType(BaseModel): + """A graph encoded as a value. 
It contains a concrete signature and a set of + required resources.""" + + input: "TypeRow" # Value inputs of the function. + output: "TypeRow" # Value outputs of the function. + # The extension requirements which are added by the operation + extension_reqs: "ExtensionSet" = Field(default_factory=list) + + @classmethod + def empty(cls) -> "FunctionType": + return FunctionType(input=[], output=[], extension_reqs=ExtensionSet([])) + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "A graph encoded as a value. It contains a concrete signature and " + "a set of required resources." + ) + } + + +class PolyFuncType(BaseModel): + """A graph encoded as a value. It contains a concrete signature and a set of + required resources.""" + + t: Literal["G"] = "G" + + # The declared type parameters, i.e., these must be instantiated with the same + # number of TypeArgs before the function can be called. Note that within the body, + # variable (DeBruijn) index 0 is element 0 of this array, i.e. the variables are + # bound from right to left. + params: list[TypeParam] + + # Template for the function. May contain variables up to length of `params` + body: FunctionType + + @classmethod + def empty(cls) -> "PolyFuncType": + return PolyFuncType(params=[], body=FunctionType.empty()) + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "A graph encoded as a value. It contains a concrete signature and " + "a set of required resources." + ) + } + + +class TypeBound(Enum): + Eq = "E" + Copyable = "C" + Any = "A" + + @staticmethod + def join(*bs: "TypeBound") -> "TypeBound": + """Computes the least upper bound for a sequence of bounds.""" + res = TypeBound.Eq + for b in bs: + if b == TypeBound.Any: + return TypeBound.Any + if res == TypeBound.Eq: + res = b + return res + + +class Opaque(BaseModel): + """An opaque operation that can be downcasted by the extensions that define it.""" + + t: Literal["Opaque"] = "Opaque" + extension: ExtensionId + id: str # Unique identifier of the opaque type. + args: list[TypeArg] + bound: TypeBound + + +# ---------------------------------------------- +# --------------- LinearType ------------------- +# ---------------------------------------------- + + +class Qubit(BaseModel): + """A qubit.""" + + t: Literal["Q"] = "Q" + + +class Type(RootModel): + """A HUGR type.""" + + root: Annotated[ + Qubit | Variable | USize | PolyFuncType | Array | TupleType | SumType | Opaque, + WrapValidator(_json_custom_error_validator), + ] = Field(discriminator="t") + + +# ------------------------------------------- +# --------------- TypeRow ------------------- +# ------------------------------------------- + +TypeRow = list[Type] + + +# ------------------------------------------- +# --------------- Signature ----------------- +# ------------------------------------------- + + +class Signature(BaseModel): + """Describes the edges required to/from a node. + + This includes both the concept of "signature" in the spec, and also the target + (value) of a call (constant). + """ + + signature: "PolyFuncType" # The underlying signature + + # The extensions which are associated with all the inputs and carried through + input_extensions: ExtensionSet + + +# Now that all classes are defined, we need to update the ForwardRefs in all type +# annotations. We use some inspect magic to find all classes defined in this file. 
+classes = inspect.getmembers( + sys.modules[__name__], + lambda member: inspect.isclass(member) and member.__module__ == __name__, +) +for _, c in classes: + if issubclass(c, BaseModel): + c.model_rebuild() diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/scripts/generate_schema.py b/scripts/generate_schema.py new file mode 100644 index 000000000..6f891716d --- /dev/null +++ b/scripts/generate_schema.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +"""Dumps the json schema for `quantinuum_hugr.serialization.SerialHugr` to a file. + +The schema is written to a file named `hugr_schema_v#.json` in the specified output directory. +If no output directory is specified, the schema is written to the current working directory. + +usage: python generate_schema.py [] +""" + +import json +import sys +from pathlib import Path + +from pydantic import TypeAdapter + +from quantinuum_hugr.serialization import SerialHugr + +if __name__ == "__main__": + if len(sys.argv) == 1: + out_dir = Path.cwd() + elif len(sys.argv) == 2: + out_dir = Path(sys.argv[1]) + else: + print(__doc__) + sys.exit(1) + + version = SerialHugr.get_version() + filename = f"hugr_schema_{version}.json" + path = out_dir / filename + + print(f"Writing schema to {path}") + + with path.open("w") as f: + json.dump(TypeAdapter(SerialHugr).json_schema(), f, indent=4) diff --git a/specification/schema/hugr_schema_v1.json b/specification/schema/hugr_schema_v1.json index 7e7a06134..3e0a38165 100644 --- a/specification/schema/hugr_schema_v1.json +++ b/specification/schema/hugr_schema_v1.json @@ -74,7 +74,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "CFG", @@ -99,7 +99,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Call", @@ -117,14 +117,14 @@ "type": "object" }, "CallIndirect": { - "description": "Operation to call a function indirectly. 
Like call, but the first input is a standard dataflow graph type.", + "description": "Call a function indirectly.\n\nLike call, but the first input is a standard dataflow graph type.", "properties": { "parent": { "title": "Parent", "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "CallIndirect", @@ -149,7 +149,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Case", @@ -174,7 +174,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Conditional", @@ -195,20 +195,55 @@ "title": "Outputs", "type": "array" }, - "extension_delta": { + "sum_rows": { + "description": "The possible rows of the Sum input", "items": { - "type": "string" + "items": { + "$ref": "#/$defs/Type" + }, + "type": "array" }, - "title": "Extension Delta", + "title": "Sum Rows", "type": "array" + }, + "extension_delta": { + "$ref": "#/$defs/ExtensionSet" } }, "required": [ - "parent" + "parent", + "sum_rows" ], "title": "Conditional", "type": "object" }, + "Const": { + "description": "A constant operation.", + "discriminator": { + "mapping": { + "Extension": "#/$defs/ExtensionConst", + "Function": "#/$defs/FunctionConst", + "Sum": "#/$defs/Sum", + "Tuple": "#/$defs/Tuple" + }, + "propertyName": "c" + }, + "oneOf": [ + { + "$ref": "#/$defs/ExtensionConst" + }, + { + "$ref": "#/$defs/FunctionConst" + }, + { + "$ref": "#/$defs/Tuple" + }, + { + "$ref": "#/$defs/Sum" + } + ], + "title": "Const" + }, "CustomOp": { "description": "A user-defined operation that can be downcasted by the extensions that define it.", "properties": { @@ -217,7 +252,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -287,7 +322,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "DFG", @@ -312,7 +347,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "DataflowBlock", @@ -344,15 +379,16 @@ "type": "array" }, "extension_delta": { - "items": { - "type": "string" - }, - "title": "Extension Delta", - "type": "array" + "$ref": "#/$defs/ExtensionSet" } }, "required": [ - "parent" + "parent", + "op", + "inputs", + "other_outputs", + "sum_rows", + "extension_delta" ], "title": "DataflowBlock", "type": "object" @@ -365,7 +401,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "ExitBlock", @@ -395,7 +431,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Const", @@ -420,6 +456,22 @@ "title": "ExtensionConst", "type": "object" }, + "ExtensionSet": { + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "default": null, + "description": "A set of extensions ids.", + "title": "ExtensionSet" + }, "ExtensionsArg": { "properties": { "tya": { @@ -428,11 +480,7 @@ "title": "Tya" }, "es": { - "items": { - "type": "string" - }, - "title": "Es", - "type": "array" + "$ref": "#/$defs/ExtensionSet" } }, "required": [ @@ -449,7 +497,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": 
"#/$defs/ExtensionSet" }, "op": { "const": "FuncDecl", @@ -479,7 +527,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "FuncDefn", @@ -509,7 +557,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Const", @@ -552,11 +600,7 @@ "type": "array" }, "extension_reqs": { - "items": { - "type": "string" - }, - "title": "Extension Reqs", - "type": "array" + "$ref": "#/$defs/ExtensionSet" } }, "required": [ @@ -582,9 +626,9 @@ "rows": { "items": { "items": { - "$ref": "#/$defs/Type", - "type": "array" - } + "$ref": "#/$defs/Type" + }, + "type": "array" }, "title": "Rows", "type": "array" @@ -604,7 +648,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Input", @@ -625,33 +669,8 @@ "title": "Input", "type": "object" }, - "InputExtensions": { - "anyOf": [ - { - "items": { - "type": "string" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "default": null - }, - "USize": { - "description": "Unsigned integer size type.", - "properties": { - "t": { - "const": "I", - "default": "I", - "title": "T" - } - }, - "title": "USize", - "type": "object" - }, "LeafOp": { + "description": "A constant operation.", "discriminator": { "mapping": { "CustomOp": "#/$defs/CustomOp", @@ -682,7 +701,8 @@ { "$ref": "#/$defs/TypeApply" } - ] + ], + "title": "LeafOp" }, "ListParam": { "properties": { @@ -709,7 +729,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LoadConstant", @@ -735,7 +755,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -769,7 +789,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Module", @@ -791,7 +811,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -815,6 +835,7 @@ "type": "object" }, "OpType": { + "description": "A constant operation.", "discriminator": { "mapping": { "CFG": "#/$defs/CFG", @@ -822,34 +843,9 @@ "CallIndirect": "#/$defs/CallIndirect", "Case": "#/$defs/Case", "Conditional": "#/$defs/Conditional", - "Const": { - "discriminator": { - "mapping": { - "Extension": "#/$defs/ExtensionConst", - "Function": "#/$defs/FunctionConst", - "Sum": "#/$defs/Sum", - "Tuple": "#/$defs/Tuple" - }, - "propertyName": "c" - }, - "oneOf": [ - { - "$ref": "#/$defs/ExtensionConst" - }, - { - "$ref": "#/$defs/FunctionConst" - }, - { - "$ref": "#/$defs/Tuple" - }, - { - "$ref": "#/$defs/Sum" - } - ] - }, + "Const": "#/$defs/Const", "DFG": "#/$defs/DFG", "DataflowBlock": "#/$defs/DataflowBlock", - "DummyOp": "#/$defs/DummyOp", "ExitBlock": "#/$defs/ExitBlock", "FuncDecl": "#/$defs/FuncDecl", "FuncDefn": "#/$defs/FuncDefn", @@ -876,32 +872,7 @@ "$ref": "#/$defs/FuncDecl" }, { - "discriminator": { - "mapping": { - "Extension": "#/$defs/ExtensionConst", - "Function": "#/$defs/FunctionConst", - "Sum": "#/$defs/Sum", - "Tuple": "#/$defs/Tuple" - }, - "propertyName": "c" - }, - "oneOf": [ - { - "$ref": "#/$defs/ExtensionConst" - }, - { - "$ref": "#/$defs/FunctionConst" - }, - { - "$ref": "#/$defs/Tuple" - }, - { - "$ref": "#/$defs/Sum" - } - ] - }, - { - "$ref": "#/$defs/DummyOp" + 
"$ref": "#/$defs/Const" }, { "$ref": "#/$defs/DataflowBlock" @@ -939,7 +910,8 @@ { "$ref": "#/$defs/DFG" } - ] + ], + "title": "OpType" }, "Opaque": { "description": "An opaque operation that can be downcasted by the extensions that define it.", @@ -1019,7 +991,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Output", @@ -1103,12 +1075,11 @@ "description": "A Sum variant For any Sum type where this value meets the type of the variant indicated by the tag.", "properties": { "parent": { - "default": 0, "title": "Parent", "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Const", @@ -1129,29 +1100,7 @@ }, "vs": { "items": { - "discriminator": { - "mapping": { - "Extension": "#/$defs/ExtensionConst", - "Function": "#/$defs/FunctionConst", - "Sum": "#/$defs/Sum", - "Tuple": "#/$defs/Tuple" - }, - "propertyName": "c" - }, - "oneOf": [ - { - "$ref": "#/$defs/ExtensionConst" - }, - { - "$ref": "#/$defs/FunctionConst" - }, - { - "$ref": "#/$defs/Tuple" - }, - { - "$ref": "#/$defs/Sum" - } - ] + "$ref": "#/$defs/Const" }, "title": "Vs", "type": "array" @@ -1168,6 +1117,24 @@ "title": "Sum", "type": "object" }, + "SumType": { + "discriminator": { + "mapping": { + "General": "#/$defs/GeneralSum", + "Unit": "#/$defs/UnitSum" + }, + "propertyName": "s" + }, + "oneOf": [ + { + "$ref": "#/$defs/UnitSum" + }, + { + "$ref": "#/$defs/GeneralSum" + } + ], + "title": "SumType" + }, "Tag": { "description": "An operation that creates a tagged sum value from one of its variants.", "properties": { @@ -1176,7 +1143,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -1216,7 +1183,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "TailLoop", @@ -1259,7 +1226,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Const", @@ -1273,29 +1240,7 @@ }, "vs": { "items": { - "discriminator": { - "mapping": { - "Extension": "#/$defs/ExtensionConst", - "Function": "#/$defs/FunctionConst", - "Sum": "#/$defs/Sum", - "Tuple": "#/$defs/Tuple" - }, - "propertyName": "c" - }, - "oneOf": [ - { - "$ref": "#/$defs/ExtensionConst" - }, - { - "$ref": "#/$defs/FunctionConst" - }, - { - "$ref": "#/$defs/Tuple" - }, - { - "$ref": "#/$defs/Sum" - } - ] + "$ref": "#/$defs/Const" }, "title": "Vs", "type": "array" @@ -1353,24 +1298,8 @@ "title": "TupleType", "type": "object" }, - "SumType": { - "discriminator": { - "mapping": { - "General": "#/$defs/GeneralSum", - "Unit": "#/$defs/UnitSum" - }, - "propertyName": "s" - }, - "oneOf": [ - { - "$ref": "#/$defs/UnitSum" - }, - { - "$ref": "#/$defs/GeneralSum" - } - ] - }, "Type": { + "description": "A HUGR type.", "discriminator": { "mapping": { "Array": "#/$defs/Array", @@ -1409,7 +1338,8 @@ { "$ref": "#/$defs/Opaque" } - ] + ], + "title": "Type" }, "TypeApplication": { "description": "Records details of an application of a PolyFuncType to some TypeArgs and the result (a less-, but still potentially-, polymorphic type).", @@ -1444,7 +1374,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -1468,6 +1398,7 @@ "type": "object" }, "TypeArg": { + "description": "A type 
argument.", "discriminator": { "mapping": { "BoundedNat": "#/$defs/BoundedNatArg", @@ -1494,7 +1425,8 @@ { "$ref": "#/$defs/ExtensionsArg" } - ] + ], + "title": "TypeArg" }, "TypeBound": { "enum": [ @@ -1506,6 +1438,7 @@ "type": "string" }, "TypeParam": { + "description": "A type parameter.", "discriminator": { "mapping": { "BoundedNat": "#/$defs/BoundedNatParam", @@ -1532,7 +1465,8 @@ { "$ref": "#/$defs/TupleParam" } - ] + ], + "title": "TypeParam" }, "TypeTypeArg": { "properties": { @@ -1568,6 +1502,18 @@ "title": "TypeTypeParam", "type": "object" }, + "USize": { + "description": "Unsigned integer size type.", + "properties": { + "t": { + "const": "I", + "default": "I", + "title": "T" + } + }, + "title": "USize", + "type": "object" + }, "UnitSum": { "description": "Simple predicate where all variants are empty tuples.", "properties": { @@ -1600,7 +1546,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -1650,6 +1596,7 @@ "type": "object" } }, + "description": "A serializable representation of a Hugr.", "properties": { "version": { "const": "v1", diff --git a/specification/schema/serialization.md b/specification/schema/serialization.md index 29e57e532..f14fd40bd 100755 --- a/specification/schema/serialization.md +++ b/specification/schema/serialization.md @@ -1,106 +1,16 @@ +# HUGR serialization schema -# Serialization Options +This folder contains the schema for the serialization of the HUGR objects +compliant with the [JSON Schema](https://json-schema.org/draft/2020-12/release-notes) +specification. -Given most of our tooling is in Rust it is useful to narrow our search -to options that have good [serde](https://serde.rs/) compatibility. For -most datastructures, serde allows us to to get serialization and -deserialization to many formats just by annotating the datastructures, -with no bespoke code. This is a maintainability godsend. It is also very -fast. +The model is generated from the pydantic model in the `quantinuum_hugr` python +package, and is used to validate the serialization format of the Rust +implementation. -Unfortunately, this excludes the Tierkreis serialization technology -Protobuf, as its [serde support is -poor](https://docs.rs/serde-protobuf/latest/serde_protobuf/) -(serialization not supported at all currently). In general, as Protobuf -does its own code generation in the languages it supports, you have to -work with the datastructures it constructs (and as in the case of -Tierkreis, write a lot of boilerplate code to then convert those in to -the data you want to work with), wheras serde just handles your own -datastructures for you. - -With that in mind, [this -article](https://blog.logrocket.com/rust-serialization-whats-ready-for-production-today/) -has a good summary of performance benchmarks for various options. An -interesting find here was -[FlatBuffers](https://google.github.io/flatbuffers/), Google's protobuf -alternative with zero-copy. Unfortunately, as the article describes it -is quite annoying to work with in Rust and shares the protobuf -schema-related problems mentioned above. - -The highest performing target is -[bincode](https://github.com/bincode-org/bincode), but it does not seem -to be widely used and has poor python support. Another notable mention -is [CBOR](https://cbor.io/); it is however not very well performing on the benchmarks. 
-
-If we take a good balance between performance and language compatibility,
-MessagePack (or [msgpack](https://msgpack.org/)) appears to be a very
-solid option. It has good serde support (as well as very wide language
-support in general, including a fast Python package implemented in C),
-is one of the top performers on the benchmarks (see also [this
-thesis](https://hdl.handle.net/10657/13140)),
-and has a small data size. Another nice benefit is that, like CBOR, it is
-very similar to JSON when decoded, which, given that serde can easily
-let us go between JSON and msgpack, gives us human-friendly text
-visibility. The similarity to JSON also allows very easy conversion from
-Python dictionaries.
-
-# Conclusion
-
-- Use serde to serialize and deserialize the HUGR Rust struct.
-
-- For the serialized format we tentatively propose msgpack, but note that
-  serde allows a very low-cost change to this at a later date.
-
-- In future, if a human-interpretable text format is required, build a
-  standalone module; this could well be [a set of MLIR
-  dialects](https://github.com/PennyLaneAI/catalyst/tree/main/mlir).
-
-## Note
-
-One important downside of this approach, particularly in comparison with
-code-generating options like Protobuf, is that non-Rust languages (in
-our case, most notably Python, and in future likely also C++) will
-require code for handling the binary format and representing the data
-model natively. However, for Python at least, this can be achieved
-relatively simply with [Pydantic](https://docs.pydantic.dev/). This also
-brings with it Python-side schema generation and validation. As an
-example, the code below fully implements serialization/deserialization
-of the spec described in the [main document](hugr.md).
-
-```python
-from typing import Any
-import ormsgpack
-from pydantic import BaseModel
-
-class MPBaseModel(BaseModel):
-    def packb(self) -> bytes:
-        return ormsgpack.packb(
-            self, option=ormsgpack.OPT_SERIALIZE_PYDANTIC | ormsgpack.OPT_NON_STR_KEYS
-        )
-
-    @classmethod
-    def unpackb(cls, b: bytes) -> "MPBaseModel":
-        return cls(**ormsgpack.unpackb(b, option=ormsgpack.OPT_NON_STR_KEYS))
-
-
-NodeID = int
-Port = tuple[NodeID, int]  # (node, offset)
-NodeWeight = Any
-
-class Hugr(MPBaseModel):
-    # (parent, #incoming, #outgoing, NodeWeight)
-    nodes: list[tuple[NodeID, int, int, NodeWeight]]
-    edges: list[tuple[Port, Port]]
-    root: NodeID
-
-# TODO: specify scheme for NodeWeight
-
-with open("../hugr/foo.bin", "rb") as f:
-    # print(Hugr.schema_json())
-    pg = Hugr.unpackb(f.read())
-    print(pg)
-    outb = pg.packb()
-    f.seek(0)
-    assert outb == f.read()
+A script `generate_schema.py` is provided to regenerate the schema. To update
+the schema, run the following command:
+
+```bash
+just update-schema
 ```
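+
+As a rough illustration of the generation step, the sketch below emits a JSON
+Schema from a toy pydantic (v2) model. The `Node` and `Hugr` classes and the
+output path are illustrative stand-ins, not the real model or the actual
+behaviour of `generate_schema.py`.
+
+```python
+# Illustrative sketch only: a toy pydantic v2 model standing in for the real
+# HUGR model. `model_json_schema()` returns a JSON Schema (draft 2020-12) dict.
+import json
+from pathlib import Path
+
+from pydantic import BaseModel
+
+
+class Node(BaseModel):
+    parent: int
+    op: str
+
+
+class Hugr(BaseModel):
+    version: str
+    nodes: list[Node]
+
+
+def write_schema(out_dir: Path) -> None:
+    schema = Hugr.model_json_schema()
+    (out_dir / "toy_schema.json").write_text(json.dumps(schema, indent=2))
+
+
+if __name__ == "__main__":
+    write_schema(Path("."))
+```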
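+
+Conversely, a serialized HUGR (e.g. one emitted by the Rust implementation)
+can be checked against the generated schema. A minimal sketch, assuming the
+third-party `jsonschema` package and placeholder file names:
+
+```python
+# Illustrative sketch only: validate a JSON-serialized HUGR against the
+# generated schema. Both file names are placeholders.
+import json
+
+import jsonschema
+
+with open("hugr_schema.json") as f:
+    schema = json.load(f)
+
+with open("example_hugr.json") as f:
+    hugr = json.load(f)
+
+# Raises jsonschema.exceptions.ValidationError if the document does not
+# conform to the schema; returns None on success.
+jsonschema.validate(instance=hugr, schema=schema)
+```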