diff --git a/flama/background.py b/flama/background.py index acc91b86..d4857a9e 100644 --- a/flama/background.py +++ b/flama/background.py @@ -10,11 +10,11 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import ParamSpec - t.ParamSpec = ParamSpec + t.ParamSpec = ParamSpec # type: ignore __all__ = ["BackgroundTask", "BackgroundTasks", "Concurrency", "BackgroundThreadTask", "BackgroundProcessTask"] -P = t.ParamSpec("P") +P = t.ParamSpec("P") # type: ignore # PORT: Remove this comment when stop supporting 3.9 class task_wrapper: diff --git a/flama/concurrency.py b/flama/concurrency.py index cb9e4ff0..7012e1f2 100644 --- a/flama/concurrency.py +++ b/flama/concurrency.py @@ -4,19 +4,33 @@ import sys import typing as t +if sys.version_info < (3, 9): # PORT: Remove when stop supporting 3.8 # pragma: no cover + import contextvars + + async def to_thread(func, /, *args, **kwargs): + return await asyncio.get_running_loop().run_in_executor( + None, functools.partial(contextvars.copy_context().run, func, *args, **kwargs) + ) + + asyncio.to_thread = to_thread # pyright: ignore + if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import ParamSpec, TypeGuard - t.TypeGuard = TypeGuard - t.ParamSpec = ParamSpec + t.TypeGuard = TypeGuard # type: ignore + t.ParamSpec = ParamSpec # type: ignore __all__ = ["is_async", "run", "run"] R = t.TypeVar("R", covariant=True) -P = t.ParamSpec("P") +P = t.ParamSpec("P") # type: ignore # PORT: Remove this comment when stop supporting 3.9 -def is_async(obj: t.Any) -> t.TypeGuard[t.Callable[..., t.Awaitable[t.Any]]]: +def is_async( + obj: t.Any, +) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9 + t.Callable[..., t.Awaitable[t.Any]] +]: """Check if given object is an async function, callable or partialised function. :param obj: Object to check. 
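Note (editor, not part of the diff): the two hunks above follow one pattern — on Python 3.9 the names ParamSpec/TypeGuard are imported from typing_extensions and monkey-patched onto the typing module (hence the added "# type: ignore" markers), and on Python 3.8 an asyncio.to_thread shim is installed that mirrors CPython's implementation (run_in_executor over a copied contextvars context). The sketch below is a minimal, standalone illustration of that backport pattern; the body of is_async is an illustrative guess based on its docstring, not flama's actual implementation.

# Standalone sketch of the backport pattern used in the diff (assumed names, not flama code).
import asyncio
import contextvars
import functools
import sys
import typing as t

if sys.version_info < (3, 10):  # same trick as the diff: alias TypeGuard onto `typing`
    from typing_extensions import TypeGuard

    t.TypeGuard = TypeGuard  # type: ignore[attr-defined]

if sys.version_info < (3, 9):  # same shim as the diff: provide asyncio.to_thread on 3.8

    async def _to_thread(func, /, *args, **kwargs):
        # Copy the current context so context variables survive the thread hop.
        loop = asyncio.get_running_loop()
        ctx = contextvars.copy_context()
        return await loop.run_in_executor(None, functools.partial(ctx.run, func, *args, **kwargs))

    asyncio.to_thread = _to_thread  # type: ignore[assignment]


def is_async(obj: t.Any) -> "t.TypeGuard[t.Callable[..., t.Awaitable[t.Any]]]":
    # Illustrative body: unwrap functools.partial and check for a coroutine function/callable.
    while isinstance(obj, functools.partial):
        obj = obj.func
    return asyncio.iscoroutinefunction(obj) or (
        callable(obj) and asyncio.iscoroutinefunction(obj.__call__)
    )


async def main() -> None:
    total = await asyncio.to_thread(sum, range(1_000_000))  # blocking work off the event loop
    print(total, is_async(main), is_async(print))  # 499999500000 True False


if __name__ == "__main__":
    asyncio.run(main())

The string annotation on is_async keeps the module importable even where t.TypeGuard is only available via the patched alias.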
diff --git a/flama/routing.py b/flama/routing.py index b5a80706..52a1b381 100644 --- a/flama/routing.py +++ b/flama/routing.py @@ -13,7 +13,7 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import TypeGuard - t.TypeGuard = TypeGuard + t.TypeGuard = TypeGuard # type: ignore if t.TYPE_CHECKING: from flama.applications import Flama @@ -336,7 +336,9 @@ def __repr__(self) -> str: @staticmethod def is_endpoint( x: t.Union[t.Callable, t.Type[endpoints.HTTPEndpoint]] - ) -> t.TypeGuard[t.Type[endpoints.HTTPEndpoint]]: + ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9 + t.Type[endpoints.HTTPEndpoint] + ]: return inspect.isclass(x) and issubclass(x, endpoints.HTTPEndpoint) def endpoint_handlers(self) -> t.Dict[str, t.Callable]: @@ -412,7 +414,9 @@ def __eq__(self, other: t.Any) -> bool: @staticmethod def is_endpoint( x: t.Union[t.Callable, t.Type[endpoints.WebSocketEndpoint]] - ) -> t.TypeGuard[t.Type[endpoints.WebSocketEndpoint]]: + ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9 + t.Type[endpoints.WebSocketEndpoint] + ]: return inspect.isclass(x) and issubclass(x, endpoints.WebSocketEndpoint) def endpoint_handlers(self) -> t.Dict[str, t.Callable]: diff --git a/flama/schemas/_libs/marshmallow/adapter.py b/flama/schemas/_libs/marshmallow/adapter.py index a9e51a4e..b4dd9efa 100644 --- a/flama/schemas/_libs/marshmallow/adapter.py +++ b/flama/schemas/_libs/marshmallow/adapter.py @@ -15,7 +15,7 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import TypeGuard - t.TypeGuard = TypeGuard + t.TypeGuard = TypeGuard # type: ignore if t.TYPE_CHECKING: from apispec.ext.marshmallow import OpenAPIConverter @@ -115,10 +115,18 @@ def unique_schema(self, schema: t.Union[Schema, t.Type[Schema]]) -> t.Type[Schem return schema - def is_schema(self, obj: t.Any) -> t.TypeGuard[t.Union[Schema, t.Type[Schema]]]: + def is_schema( + self, obj: t.Any + ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9 + t.Union[Schema, t.Type[Schema]] + ]: return isinstance(obj, Schema) or (inspect.isclass(obj) and issubclass(obj, Schema)) - def is_field(self, obj: t.Any) -> t.TypeGuard[t.Union[Field, t.Type[Field]]]: + def is_field( + self, obj: t.Any + ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9 + t.Union[Field, t.Type[Field]] + ]: return isinstance(obj, Field) or (inspect.isclass(obj) and issubclass(obj, Field)) def _schema_instance(self, schema: t.Union[t.Type[Schema], Schema]) -> Schema: diff --git a/flama/schemas/_libs/pydantic/adapter.py b/flama/schemas/_libs/pydantic/adapter.py index 1aed93ce..5bfa7534 100644 --- a/flama/schemas/_libs/pydantic/adapter.py +++ b/flama/schemas/_libs/pydantic/adapter.py @@ -14,7 +14,7 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import TypeGuard - t.TypeGuard = TypeGuard + t.TypeGuard = TypeGuard # type: ignore __all__ = ["PydanticAdapter"] @@ -116,8 +116,12 @@ def to_json_schema(self, schema: t.Union[Schema, t.Type[Schema], Field]) -> JSON def unique_schema(self, schema: t.Union[Schema, t.Type[Schema]]) -> t.Type[Schema]: return schema.__class__ if isinstance(schema, Schema) else schema - def is_schema(self, obj: t.Any) -> t.TypeGuard[t.Type[Schema]]: + def is_schema( + self, obj: t.Any + ) -> t.TypeGuard[t.Type[Schema]]: # type: ignore # PORT: Remove 
this comment when stop supporting 3.9 return inspect.isclass(obj) and issubclass(obj, Schema) - def is_field(self, obj: t.Any) -> t.TypeGuard[Field]: + def is_field( + self, obj: t.Any + ) -> t.TypeGuard[Field]: # type: ignore # PORT: Remove this comment when stop supporting 3.9 return isinstance(obj, Field) diff --git a/flama/schemas/_libs/typesystem/adapter.py b/flama/schemas/_libs/typesystem/adapter.py index a1d2d93e..a31afa94 100644 --- a/flama/schemas/_libs/typesystem/adapter.py +++ b/flama/schemas/_libs/typesystem/adapter.py @@ -13,7 +13,7 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import TypeGuard - t.TypeGuard = TypeGuard + t.TypeGuard = TypeGuard # type: ignore __all__ = ["TypesystemAdapter"] @@ -101,9 +101,13 @@ def unique_schema(self, schema: Schema) -> Schema: return schema @t.no_type_check - def is_schema(self, obj: t.Any) -> t.TypeGuard[Schema]: + def is_schema( + self, obj: t.Any + ) -> t.TypeGuard[Schema]: # type: ignore # PORT: Remove this comment when stop supporting 3.9 return isinstance(obj, Schema) or (inspect.isclass(obj) and issubclass(obj, Schema)) @t.no_type_check - def is_field(self, obj: t.Any) -> t.TypeGuard[Field]: + def is_field( + self, obj: t.Any + ) -> t.TypeGuard[Field]: # type: ignore # PORT: Remove this comment when stop supporting 3.9 return isinstance(obj, Field) or (inspect.isclass(obj) and issubclass(obj, Field)) diff --git a/flama/schemas/adapter.py b/flama/schemas/adapter.py index a21db96f..1c5da1fb 100644 --- a/flama/schemas/adapter.py +++ b/flama/schemas/adapter.py @@ -8,7 +8,7 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import TypeGuard - t.TypeGuard = TypeGuard + t.TypeGuard = TypeGuard # type: ignore class Adapter(t.Generic[_T_Schema, _T_Field], metaclass=abc.ABCMeta): @@ -83,9 +83,17 @@ def unique_schema(self, schema: t.Union[_T_Schema, t.Type[_T_Schema]]) -> t.Unio ... @abc.abstractmethod - def is_schema(self, obj: t.Any) -> t.TypeGuard[t.Union[_T_Schema, t.Type[_T_Schema]]]: + def is_schema( + self, obj: t.Any + ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9 + t.Union[_T_Schema, t.Type[_T_Schema]] + ]: ... @abc.abstractmethod - def is_field(self, obj: t.Any) -> t.TypeGuard[t.Union[_T_Field, t.Type[_T_Field]]]: + def is_field( + self, obj: t.Any + ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9 + t.Union[_T_Field, t.Type[_T_Field]] + ]: ... 
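Note (editor, not part of the diff): the adapter changes above are cosmetic for the type checker — every is_schema/is_field predicate still returns t.TypeGuard[...], the signatures are only reflowed so the "# type: ignore" (needed because t.TypeGuard is patched in on 3.9) sits on an accepted line. Below is a minimal, self-contained sketch of why these predicates return TypeGuard at all; the Schema class and schema_name helper are hypothetical stand-ins, not flama's adapters.

# Hypothetical example: a TypeGuard-returning predicate narrows `obj` for the caller.
import inspect
import sys
import typing as t

if sys.version_info < (3, 10):
    from typing_extensions import TypeGuard

    t.TypeGuard = TypeGuard  # type: ignore[attr-defined]


class Schema:
    """Stand-in for a schema base class (marshmallow/pydantic/typesystem in the real code)."""


def is_schema(obj: t.Any) -> "t.TypeGuard[t.Union[Schema, t.Type[Schema]]]":
    return isinstance(obj, Schema) or (inspect.isclass(obj) and issubclass(obj, Schema))


def schema_name(obj: t.Any) -> str:
    if is_schema(obj):
        # Inside this branch the checker treats `obj` as Schema | type[Schema],
        # so no cast is needed before touching schema-specific attributes.
        return obj.__name__ if inspect.isclass(obj) else type(obj).__name__
    raise TypeError(f"{obj!r} is not a schema")


print(schema_name(Schema))    # "Schema"
print(schema_name(Schema()))  # "Schema"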
diff --git a/flama/schemas/data_structures.py b/flama/schemas/data_structures.py index 35e28752..601f6e76 100644 --- a/flama/schemas/data_structures.py +++ b/flama/schemas/data_structures.py @@ -10,7 +10,7 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import TypeGuard - t.TypeGuard = TypeGuard + t.TypeGuard = TypeGuard # type: ignore __all__ = ["Field", "Schema", "Parameter", "Parameters"] diff --git a/flama/types/asgi.py b/flama/types/asgi.py index 1319e314..97c6a172 100644 --- a/flama/types/asgi.py +++ b/flama/types/asgi.py @@ -4,8 +4,8 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import Concatenate, ParamSpec - t.Concatenate = Concatenate - t.ParamSpec = ParamSpec + t.Concatenate = Concatenate # type: ignore + t.ParamSpec = ParamSpec # type: ignore if t.TYPE_CHECKING: from flama import endpoints # noqa @@ -29,7 +29,7 @@ "WebSocketHandler", ] -P = t.ParamSpec("P") +P = t.ParamSpec("P") # type: ignore # PORT: Remove this comment when stop supporting 3.9 R = t.TypeVar("R", covariant=True) Scope = t.NewType("Scope", t.MutableMapping[str, t.Any]) @@ -73,8 +73,12 @@ def __init__(self, app: App, *args: P.args, **kwargs: P.kwargs): ... -MiddlewareFunction = t.Callable[t.Concatenate[App, P], App] -MiddlewareAsyncFunction = t.Callable[t.Concatenate[App, P], t.Awaitable[App]] +MiddlewareFunction = t.Callable[ + t.Concatenate[App, P], App # type: ignore # PORT: Remove this comment when stop supporting 3.9 +] +MiddlewareAsyncFunction = t.Callable[ + t.Concatenate[App, P], t.Awaitable[App] # type: ignore # PORT: Remove this comment when stop supporting 3.9 +] Middleware = t.Union[t.Type[MiddlewareClass], t.Type[MiddlewareAsyncClass], MiddlewareFunction, MiddlewareAsyncFunction] HTTPHandler = t.Union[AppFunction, t.Type["endpoints.HTTPEndpoint"]] diff --git a/flama/types/schema.py b/flama/types/schema.py index f14fc33d..1df22333 100644 --- a/flama/types/schema.py +++ b/flama/types/schema.py @@ -5,7 +5,7 @@ if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover from typing_extensions import TypeGuard - t.TypeGuard = TypeGuard + t.TypeGuard = TypeGuard # type: ignore __all__ = ["Schema", "is_schema"] @@ -13,7 +13,9 @@ _T_Schema = t.TypeVar("_T_Schema") -def is_schema(obj: t.Any) -> t.TypeGuard[t.Type["Schema"]]: +def is_schema( + obj: t.Any, +) -> t.TypeGuard[t.Type["Schema"]]: # type: ignore # PORT: Remove this comment when stop supporting 3.9 return inspect.isclass(obj) and issubclass(obj, Schema) diff --git a/poetry.lock b/poetry.lock index 52118031..33896538 100644 --- a/poetry.lock +++ b/poetry.lock @@ -532,6 +532,35 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "cmake" +version = "3.27.1" +description = "CMake is an open-source, cross-platform family of tools designed to build, test and package software" +optional = false +python-versions = "*" +files = [ + {file = "cmake-3.27.1-py2.py3-none-macosx_10_10_universal2.macosx_10_10_x86_64.macosx_11_0_arm64.macosx_11_0_universal2.whl", hash = "sha256:c62c5a6d42e68eb955fc321f7bc84290e4c4771ee7e5301c2eaa9586c874fd8e"}, + {file = "cmake-3.27.1-py2.py3-none-manylinux2010_i686.manylinux_2_12_i686.whl", hash = "sha256:18ef1c579cb4c94ece6bbb7c7f3e0170b078bf787f0a372194f0921e79f6098c"}, + {file = "cmake-3.27.1-py2.py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = 
"sha256:ad3aca0d94abe6313a7b1c65b8b3d7eb3158786fd1dd6a9f8c42f82850fb974c"}, + {file = "cmake-3.27.1-py2.py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:50bfe69d369a61eb63e5b8af76a2383cf312d1e8449bd797d563f6c62809d317"}, + {file = "cmake-3.27.1-py2.py3-none-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:7fb6d9183b90d4cc4db7b022aa7c9ef3431d281aea29ca259de7199bc75b7e09"}, + {file = "cmake-3.27.1-py2.py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:2583464302ecc287619578627e26962386a41a98bbf1fb4c8c90d600ec1a1be5"}, + {file = "cmake-3.27.1-py2.py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:7a5431c7ca0b7145b857dd0eab26f4f9ec42661bb67afa6d437b3e48532b8e3a"}, + {file = "cmake-3.27.1-py2.py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1fb6d882bbd7e77fad206dfdbcaf880f4bcd7e8d0c23b37058ee155715bd19ed"}, + {file = "cmake-3.27.1-py2.py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:cee7dd0bcc5bd14d94ecdbbf9883b17f3001adc5f696b7d8eba0482354e5e017"}, + {file = "cmake-3.27.1-py2.py3-none-musllinux_1_1_i686.whl", hash = "sha256:82a6f57449e7bf9b510ed82b29982e4eec8b5c5e80a51208368dc1aa58b8181b"}, + {file = "cmake-3.27.1-py2.py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:7052bb12c3492083169269fee7c7a11c053cae35949346b12d2998b971602b78"}, + {file = "cmake-3.27.1-py2.py3-none-musllinux_1_1_s390x.whl", hash = "sha256:482e7018fc8d9bc98e7f30b5071c021ca0e27b131dd61900395abfd768c3fe29"}, + {file = "cmake-3.27.1-py2.py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:a112dd298b8ac598fef3653dff1592ba4c0f9bf7fe75b77ab44a6edfcceb96d4"}, + {file = "cmake-3.27.1-py2.py3-none-win32.whl", hash = "sha256:b9d68783ea01775d6d4ea220d3b4e90d5e287cf9a1db09c5a9b78c7748e1c3d0"}, + {file = "cmake-3.27.1-py2.py3-none-win_amd64.whl", hash = "sha256:628f75286475b89d6566db62c0869de5f0a07ad9bba10bebe6a48012fa1ee777"}, + {file = "cmake-3.27.1-py2.py3-none-win_arm64.whl", hash = "sha256:ee7a47e37a29b8124d9125a8c390fb94822a2695d80151560004d4f4f78c0ad7"}, + {file = "cmake-3.27.1.tar.gz", hash = "sha256:7ee6af09b2b575a491483b72927ee7e4beb59e7fb86e6d905a7027607a3f367e"}, +] + +[package.extras] +test = ["coverage (>=4.2)", "flake8 (>=3.0.4)", "path.py (>=11.5.0)", "pytest (>=3.0.3)", "pytest-cov (>=2.4.0)", "pytest-runner (>=2.9)", "pytest-virtualenv (>=1.7.0)", "scikit-build (>=0.10.0)", "setuptools (>=28.0.0)", "virtualenv (>=15.0.3)", "wheel"] + [[package]] name = "colorama" version = "0.4.6" @@ -1420,6 +1449,16 @@ files = [ {file = "libclang-16.0.6.tar.gz", hash = "sha256:4acdde39dfe410c877b4ccc0d4b57eb952100e4ee26bbdf6cfdb88e2033a7d31"}, ] +[[package]] +name = "lit" +version = "16.0.6" +description = "A Software Testing Tool" +optional = false +python-versions = "*" +files = [ + {file = "lit-16.0.6.tar.gz", hash = "sha256:84623c9c23b6b14763d637f4e63e6b721b3446ada40bf7001d8fee70b8e77a9a"}, +] + [[package]] name = "markdown" version = "3.4.4" @@ -1747,6 +1786,164 @@ files = [ {file = "numpy-1.23.5.tar.gz", hash = "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a"}, ] +[[package]] +name = "nvidia-cublas-cu11" +version = "11.10.3.66" +description = "CUBLAS native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl", hash = "sha256:d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded"}, + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-win_amd64.whl", hash = 
"sha256:8ac17ba6ade3ed56ab898a036f9ae0756f1e81052a317bf98f8c6d18dc3ae49e"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-cupti-cu11" +version = "11.7.101" +description = "CUDA profiling tools runtime libs." +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cuda_cupti_cu11-11.7.101-py3-none-manylinux1_x86_64.whl", hash = "sha256:e0cfd9854e1f2edaa36ca20d21cd0bdd5dcfca4e3b9e130a082e05b33b6c5895"}, + {file = "nvidia_cuda_cupti_cu11-11.7.101-py3-none-win_amd64.whl", hash = "sha256:7cc5b8f91ae5e1389c3c0ad8866b3b016a175e827ea8f162a672990a402ab2b0"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-nvrtc-cu11" +version = "11.7.99" +description = "NVRTC native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:f7d9610d9b7c331fa0da2d1b2858a4a8315e6d49765091d28711c8946e7425e7"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:f2effeb1309bdd1b3854fc9b17eaf997808f8b25968ce0c7070945c4265d64a3"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-runtime-cu11" +version = "11.7.99" +description = "CUDA Runtime native Libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31"}, + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:bc77fa59a7679310df9d5c70ab13c4e34c64ae2124dd1efd7e5474b71be125c7"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cudnn-cu11" +version = "8.5.0.96" +description = "cuDNN runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7"}, + {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cufft-cu11" +version = "10.9.0.58" +description = "CUFFT native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl", hash = "sha256:222f9da70c80384632fd6035e4c3f16762d64ea7a843829cb278f98b3cb7dd81"}, + {file = "nvidia_cufft_cu11-10.9.0.58-py3-none-win_amd64.whl", hash = "sha256:c4d316f17c745ec9c728e30409612eaf77a8404c3733cdf6c9c1569634d1ca03"}, +] + +[[package]] +name = "nvidia-curand-cu11" +version = "10.2.10.91" +description = "CURAND native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_curand_cu11-10.2.10.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:eecb269c970fa599a2660c9232fa46aaccbf90d9170b96c462e13bcb4d129e2c"}, + {file = "nvidia_curand_cu11-10.2.10.91-py3-none-win_amd64.whl", hash = "sha256:f742052af0e1e75523bde18895a9ed016ecf1e5aa0ecddfcc3658fd11a1ff417"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cusolver-cu11" +version = "11.4.0.1" +description = "CUDA 
solver native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cusolver_cu11-11.4.0.1-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:72fa7261d755ed55c0074960df5904b65e2326f7adce364cbe4945063c1be412"}, + {file = "nvidia_cusolver_cu11-11.4.0.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:700b781bfefd57d161443aff9ace1878584b93e0b2cfef3d6e9296d96febbf99"}, + {file = "nvidia_cusolver_cu11-11.4.0.1-py3-none-win_amd64.whl", hash = "sha256:00f70b256add65f8c1eb3b6a65308795a93e7740f6df9e273eccbba770d370c4"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cusparse-cu11" +version = "11.7.4.91" +description = "CUSPARSE native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cusparse_cu11-11.7.4.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:a3389de714db63321aa11fbec3919271f415ef19fda58aed7f2ede488c32733d"}, + {file = "nvidia_cusparse_cu11-11.7.4.91-py3-none-win_amd64.whl", hash = "sha256:304a01599534f5186a8ed1c3756879282c72c118bc77dd890dc1ff868cad25b9"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-nccl-cu11" +version = "2.14.3" +description = "NVIDIA Collective Communication Library (NCCL) Runtime" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_nccl_cu11-2.14.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:5e5534257d1284b8e825bc3a182c6f06acd6eb405e9f89d49340e98cd8f136eb"}, +] + +[[package]] +name = "nvidia-nvtx-cu11" +version = "11.7.91" +description = "NVIDIA Tools Extension" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_nvtx_cu11-11.7.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:b22c64eee426a62fc00952b507d6d29cf62b4c9df7a480fcc417e540e05fd5ac"}, + {file = "nvidia_nvtx_cu11-11.7.91-py3-none-win_amd64.whl", hash = "sha256:dfd7fcb2a91742513027d63a26b757f38dd8b07fecac282c4d132a9d373ff064"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + [[package]] name = "oauthlib" version = "3.2.2" @@ -2842,38 +3039,54 @@ files = [ [[package]] name = "torch" -version = "2.0.1" +version = "2.0.0" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = false python-versions = ">=3.8.0" files = [ - {file = "torch-2.0.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:8ced00b3ba471856b993822508f77c98f48a458623596a4c43136158781e306a"}, - {file = "torch-2.0.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:359bfaad94d1cda02ab775dc1cc386d585712329bb47b8741607ef6ef4950747"}, - {file = "torch-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:7c84e44d9002182edd859f3400deaa7410f5ec948a519cc7ef512c2f9b34d2c4"}, - {file = "torch-2.0.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:567f84d657edc5582d716900543e6e62353dbe275e61cdc36eda4929e46df9e7"}, - {file = "torch-2.0.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:787b5a78aa7917465e9b96399b883920c88a08f4eb63b5a5d2d1a16e27d2f89b"}, - {file = "torch-2.0.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:e617b1d0abaf6ced02dbb9486803abfef0d581609b09641b34fa315c9c40766d"}, - {file = "torch-2.0.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b6019b1de4978e96daa21d6a3ebb41e88a0b474898fe251fd96189587408873e"}, - {file = "torch-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:dbd68cbd1cd9da32fe5d294dd3411509b3d841baecb780b38b3b7b06c7754434"}, - {file = "torch-2.0.1-cp311-none-macosx_10_9_x86_64.whl", hash = 
"sha256:ef654427d91600129864644e35deea761fb1fe131710180b952a6f2e2207075e"}, - {file = "torch-2.0.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:25aa43ca80dcdf32f13da04c503ec7afdf8e77e3a0183dd85cd3e53b2842e527"}, - {file = "torch-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5ef3ea3d25441d3957348f7e99c7824d33798258a2bf5f0f0277cbcadad2e20d"}, - {file = "torch-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0882243755ff28895e8e6dc6bc26ebcf5aa0911ed81b2a12f241fc4b09075b13"}, - {file = "torch-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:f66aa6b9580a22b04d0af54fcd042f52406a8479e2b6a550e3d9f95963e168c8"}, - {file = "torch-2.0.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:1adb60d369f2650cac8e9a95b1d5758e25d526a34808f7448d0bd599e4ae9072"}, - {file = "torch-2.0.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:1bcffc16b89e296826b33b98db5166f990e3b72654a2b90673e817b16c50e32b"}, - {file = "torch-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e10e1597f2175365285db1b24019eb6f04d53dcd626c735fc502f1e8b6be9875"}, - {file = "torch-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:423e0ae257b756bb45a4b49072046772d1ad0c592265c5080070e0767da4e490"}, - {file = "torch-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8742bdc62946c93f75ff92da00e3803216c6cce9b132fbca69664ca38cfb3e18"}, - {file = "torch-2.0.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:c62df99352bd6ee5a5a8d1832452110435d178b5164de450831a3a8cc14dc680"}, - {file = "torch-2.0.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:671a2565e3f63b8fe8e42ae3e36ad249fe5e567435ea27b94edaa672a7d0c416"}, + {file = "torch-2.0.0-1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c9090bda7d2eeeecd74f51b721420dbeb44f838d4536cc1b284e879417e3064a"}, + {file = "torch-2.0.0-1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:bd42db2a48a20574d2c33489e120e9f32789c4dc13c514b0c44272972d14a2d7"}, + {file = "torch-2.0.0-1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8969aa8375bcbc0c2993e7ede0a7f889df9515f18b9b548433f412affed478d9"}, + {file = "torch-2.0.0-1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ab2da16567cb55b67ae39e32d520d68ec736191d88ac79526ca5874754c32203"}, + {file = "torch-2.0.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:7a9319a67294ef02459a19738bbfa8727bb5307b822dadd708bc2ccf6c901aca"}, + {file = "torch-2.0.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9f01fe1f6263f31bd04e1757946fd63ad531ae37f28bb2dbf66f5c826ee089f4"}, + {file = "torch-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:527f4ae68df7b8301ee6b1158ca56350282ea633686537b30dbb5d7b4a52622a"}, + {file = "torch-2.0.0-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:ce9b5a49bd513dff7950a5a07d6e26594dd51989cee05ba388b03e8e366fd5d5"}, + {file = "torch-2.0.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:53e1c33c6896583cdb9a583693e22e99266444c4a43392dddc562640d39e542b"}, + {file = "torch-2.0.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:09651bff72e439d004c991f15add0c397c66f98ab36fe60d5514b44e4da722e8"}, + {file = "torch-2.0.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d439aec349c98f12819e8564b8c54008e4613dd4428582af0e6e14c24ca85870"}, + {file = "torch-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2802f84f021907deee7e9470ed10c0e78af7457ac9a08a6cd7d55adef835fede"}, + {file = "torch-2.0.0-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:01858620f25f25e7a9ec4b547ff38e5e27c92d38ec4ccba9cfbfb31d7071ed9c"}, + {file = "torch-2.0.0-cp311-none-macosx_11_0_arm64.whl", hash = 
"sha256:9a2e53b5783ef5896a6af338b36d782f28e83c8ddfc2ac44b67b066d9d76f498"}, + {file = "torch-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ec5fff2447663e369682838ff0f82187b4d846057ef4d119a8dea7772a0b17dd"}, + {file = "torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11b0384fe3c18c01b8fc5992e70fc519cde65e44c51cc87be1838c1803daf42f"}, + {file = "torch-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:e54846aa63855298cfb1195487f032e413e7ac9cbfa978fda32354cc39551475"}, + {file = "torch-2.0.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:cc788cbbbbc6eb4c90e52c550efd067586c2693092cf367c135b34893a64ae78"}, + {file = "torch-2.0.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:d292640f0fd72b7a31b2a6e3b635eb5065fcbedd4478f9cad1a1e7a9ec861d35"}, + {file = "torch-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6befaad784004b7af357e3d87fa0863c1f642866291f12a4c2af2de435e8ac5c"}, + {file = "torch-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a83b26bd6ae36fbf5fee3d56973d9816e2002e8a3b7d9205531167c28aaa38a7"}, + {file = "torch-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c7e67195e1c3e33da53954b026e89a8e1ff3bc1aeb9eb32b677172d4a9b5dcbf"}, + {file = "torch-2.0.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6e0b97beb037a165669c312591f242382e9109a240e20054d5a5782d9236cad0"}, + {file = "torch-2.0.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:297a4919aff1c0f98a58ebe969200f71350a1d4d4f986dbfd60c02ffce780e99"}, ] [package.dependencies] filelock = "*" jinja2 = "*" networkx = "*" +nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-cupti-cu11 = {version = "11.7.101", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cufft-cu11 = {version = "10.9.0.58", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-curand-cu11 = {version = "10.2.10.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusolver-cu11 = {version = "11.4.0.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusparse-cu11 = {version = "11.7.4.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nccl-cu11 = {version = "2.14.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nvtx-cu11 = {version = "11.7.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} sympy = "*" +triton = {version = "2.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} typing-extensions = "*" [package.extras] @@ -2894,6 +3107,43 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +[[package]] +name = "triton" +version = "2.0.0" +description = "A language and compiler for custom Deep Learning operations" +optional = false +python-versions = "*" +files = [ + {file = "triton-2.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:38806ee9663f4b0f7cd64790e96c579374089e58f49aac4a6608121aa55e2505"}, + {file = "triton-2.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:226941c7b8595219ddef59a1fdb821e8c744289a132415ddd584facedeb475b1"}, + {file = "triton-2.0.0-1-cp36-cp36m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4c9fc8c89874bc48eb7e7b2107a9b8d2c0bf139778637be5bfccb09191685cfd"}, + {file = "triton-2.0.0-1-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d2684b6a60b9f174f447f36f933e9a45f31db96cb723723ecd2dcfd1c57b778b"}, + {file = "triton-2.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9d4978298b74fcf59a75fe71e535c092b023088933b2f1df933ec32615e4beef"}, + {file = "triton-2.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:74f118c12b437fb2ca25e1a04759173b517582fcf4c7be11913316c764213656"}, + {file = "triton-2.0.0-1-pp37-pypy37_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9618815a8da1d9157514f08f855d9e9ff92e329cd81c0305003eb9ec25cc5add"}, + {file = "triton-2.0.0-1-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1aca3303629cd3136375b82cb9921727f804e47ebee27b2677fef23005c3851a"}, + {file = "triton-2.0.0-1-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e3e13aa8b527c9b642e3a9defcc0fbd8ffbe1c80d8ac8c15a01692478dc64d8a"}, + {file = "triton-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f05a7e64e4ca0565535e3d5d3405d7e49f9d308505bb7773d21fb26a4c008c2"}, + {file = "triton-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4b99ca3c6844066e516658541d876c28a5f6e3a852286bbc97ad57134827fd"}, + {file = "triton-2.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47b4d70dc92fb40af553b4460492c31dc7d3a114a979ffb7a5cdedb7eb546c08"}, + {file = "triton-2.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fedce6a381901b1547e0e7e1f2546e4f65dca6d91e2d8a7305a2d1f5551895be"}, + {file = "triton-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75834f27926eab6c7f00ce73aaf1ab5bfb9bec6eb57ab7c0bfc0a23fac803b4c"}, + {file = "triton-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0117722f8c2b579cd429e0bee80f7731ae05f63fe8e9414acd9a679885fcbf42"}, + {file = "triton-2.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcd9be5d0c2e45d2b7e6ddc6da20112b6862d69741576f9c3dbaf941d745ecae"}, + {file = "triton-2.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a0d2c3fc2eab4ba71384f2e785fbfd47aa41ae05fa58bf12cb31dcbd0aeceb"}, + {file = "triton-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c47b72c72693198163ece9d90a721299e4fb3b8e24fd13141e384ad952724f"}, +] + +[package.dependencies] +cmake = "*" +filelock = "*" +lit = "*" +torch = "*" + +[package.extras] +tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)"] +tutorials = ["matplotlib", "pandas", "tabulate"] + [[package]] name = "typesystem" version = "0.4.1" @@ -3430,4 +3680,4 @@ typesystem = ["typesystem"] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "0d588d3f5932e2d89c8f552b3365699f4218c1c13b656748e945295c6dd9e7a9" +content-hash = "bf26d2e181e3e40bd978fe0a212ee32bd7fb4167f5357768e5b6c5883ce2007a" diff --git 
a/pyproject.toml b/pyproject.toml index 7fcbadbd..c4abb3af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,7 +93,7 @@ optional = true [tool.poetry.group.ml.dependencies] scikit-learn = "^1.3.0" tensorflow-cpu = { version = "^2.12.0", platform = "linux"} -torch = "^2.0.0" +torch = "^2.0.0, !=2.0.1" [tool.black] line-length = 120 diff --git a/tests/conftest.py b/tests/conftest.py index bdb2080d..d328acb3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ import asyncio import tempfile import typing as t +import warnings from contextlib import ExitStack from pathlib import Path from unittest.mock import AsyncMock @@ -21,6 +22,7 @@ try: import numpy as np except Exception: + warnings.warn("Numpy not installed") np = None try: @@ -29,16 +31,19 @@ import sklearn.neural_network import sklearn.pipeline except Exception: + warnings.warn("SKLearn not installed") sklearn = None try: import tensorflow as tf except Exception: + warnings.warn("Tensorflow not installed") tf = None try: import torch except Exception: + warnings.warn("Torch not installed") torch = None DATABASE_URL = "sqlite+aiosqlite://"
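Note (editor, not part of the diff): the pyproject change uses Poetry's comma-separated constraint syntax ("^2.0.0, !=2.0.1" means both conditions must hold), so the resolver falls back to torch 2.0.0; that is why the regenerated poetry.lock gains the Linux-only companion packages torch 2.0.0 declares (cmake, lit, the nvidia-* CUDA wheels, triton). The conftest.py change only replaces silent failures with warnings; tests elsewhere still decide what to do with the None sentinel. The snippet below is a hypothetical standalone test module sketching that optional-dependency pattern — the pytest.mark.skipif guard is a common companion technique, not something this PR adds.

# Hypothetical test module showing the optional-import-with-warning pattern from conftest.py.
import warnings

try:
    import torch
except Exception:
    warnings.warn("Torch not installed")  # surfaced in pytest's warnings summary
    torch = None

import pytest


@pytest.mark.skipif(torch is None, reason="torch extra not installed")
def test_tensor_sum():
    # Only runs when the optional ML dependency resolved successfully.
    assert torch.tensor([1, 2, 3]).sum().item() == 6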