From 5e425a6b4ce9fcbb3c6ae9c6206ccc44383d4588 Mon Sep 17 00:00:00 2001
From: Michael Osthege
Date: Sun, 26 Nov 2023 16:43:13 +0100
Subject: [PATCH] Compatibility fix for PyTensor >=2.18.1

---
 environment.yml                   |  2 +-
 pytensor_federated/__init__.py    |  2 +-
 pytensor_federated/op_async.py    | 12 +++---------
 pytensor_federated/wrapper_ops.py |  6 +-----
 4 files changed, 6 insertions(+), 16 deletions(-)

diff --git a/environment.yml b/environment.yml
index 6534e74..0cfa71f 100644
--- a/environment.yml
+++ b/environment.yml
@@ -12,4 +12,4 @@ dependencies:
   - psutil
   - pip:
     - betterproto[compiler]==2.0.0b6
-    - pymc==5.8.0
+    - pymc==5.10.0
diff --git a/pytensor_federated/__init__.py b/pytensor_federated/__init__.py
index 8077dcc..21bdc4b 100644
--- a/pytensor_federated/__init__.py
+++ b/pytensor_federated/__init__.py
@@ -19,4 +19,4 @@
 from .service import ArraysToArraysService, ArraysToArraysServiceClient
 from .signatures import ComputeFunc, LogpFunc, LogpGradFunc
 
-__version__ = "1.0.0"
+__version__ = "1.0.1"
diff --git a/pytensor_federated/op_async.py b/pytensor_federated/op_async.py
index 302891a..72428a1 100644
--- a/pytensor_federated/op_async.py
+++ b/pytensor_federated/op_async.py
@@ -7,7 +7,7 @@
 from pytensor.graph import FunctionGraph
 from pytensor.graph.basic import Apply, Variable, apply_depends_on
 from pytensor.graph.features import ReplaceValidate
-from pytensor.graph.op import Op, OutputStorageType, ParamsInputType
+from pytensor.graph.op import Op, OutputStorageType
 from pytensor.graph.rewriting.basic import GraphRewriter
 
 from .utils import get_useful_event_loop
@@ -19,10 +19,9 @@ def perform(
         node: Apply,
         inputs: Sequence[Any],
         output_storage: OutputStorageType,
-        params: ParamsInputType = None,
     ) -> None:
         loop = get_useful_event_loop()
-        coro = self.perform_async(node, inputs, output_storage, params)
+        coro = self.perform_async(node, inputs, output_storage)
         loop.run_until_complete(coro)
         return
 
@@ -31,7 +30,6 @@ async def perform_async(
         node: Apply,
         inputs: Sequence[Any],
         output_storage: OutputStorageType,
-        params: ParamsInputType = None,
     ) -> None:
         raise NotImplementedError()
 
@@ -57,7 +55,6 @@ async def perform_async(
         node: Apply,
         inputs: Sequence[Any],
         output_storage: OutputStorageType,
-        params: ParamsInputType = None,
     ) -> None:
         outs = await self.__async_fn(*inputs)
         if not isinstance(outs, (list, tuple)):
@@ -112,7 +109,6 @@ def perform(
         node: Apply,
         inputs: Sequence[Any],
         output_storage: OutputStorageType,
-        params: ParamsInputType = None,
     ) -> None:
         # Create coroutines the performing the taks of each child node
         coros = []
@@ -122,9 +118,7 @@
             ito = ifrom + apply.nin
             oto = ofrom + apply.nout
             coros.append(
-                apply.op.perform_async(
-                    apply, inputs[ifrom:ito], output_storage[ofrom:oto], params=params
-                )
+                apply.op.perform_async(apply, inputs[ifrom:ito], output_storage[ofrom:oto])
             )
             ifrom = ito
             ofrom = oto
diff --git a/pytensor_federated/wrapper_ops.py b/pytensor_federated/wrapper_ops.py
index 6585109..f123330 100644
--- a/pytensor_federated/wrapper_ops.py
+++ b/pytensor_federated/wrapper_ops.py
@@ -5,7 +5,7 @@
 import pytensor.tensor as at
 from pytensor.compile.ops import FromFunctionOp
 from pytensor.graph.basic import Apply, Variable
-from pytensor.graph.op import Op, OutputStorageType, ParamsInputType
+from pytensor.graph.op import Op, OutputStorageType
 
 from .op_async import AsyncFromFunctionOp, AsyncOp
 from .signatures import ComputeFunc, LogpFunc, LogpGradFunc
@@ -63,7 +63,6 @@ def perform(
         node: Apply,
         inputs: Sequence[np.ndarray],
         output_storage: OutputStorageType,
-        params: ParamsInputType = None,
     ) -> None:
         logp = self._logp_func(*inputs)
         output_storage[0][0] = logp
@@ -76,7 +75,6 @@ async def perform_async(
         node: Apply,
         inputs: Sequence[Any],
         output_storage: OutputStorageType,
-        params: ParamsInputType = None,
     ) -> None:
         logp = await self._logp_func(*inputs)
         output_storage[0][0] = logp
@@ -111,7 +109,6 @@ def perform(
         node: Apply,
         inputs: Sequence[np.ndarray],
         output_storage: OutputStorageType,
-        params: ParamsInputType = None,
     ) -> None:
         logp, gradient = self._logp_grad_func(*inputs)
         output_storage[0][0] = logp
@@ -141,7 +138,6 @@ async def perform_async(
         node: Apply,
         inputs: Sequence[Any],
         output_storage: OutputStorageType,
-        params: ParamsInputType = None,
     ) -> None:
         logp, gradient = await self._logp_grad_func(*inputs)
         output_storage[0][0] = logp