Skip to content

Commit

Permalink
Cleanup grammar, styles and code
Browse files Browse the repository at this point in the history
Signed-off-by: Egor Savkin <[email protected]>
  • Loading branch information
thomasfire committed Nov 21, 2022
1 parent 478284f commit 73e07e9
Show file tree
Hide file tree
Showing 3 changed files with 17 additions and 27 deletions.
21 changes: 7 additions & 14 deletions artiq/language/environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from artiq.language import units
from artiq.language.core import rpc


__all__ = ["NoDefault", "DefaultMissing",
"PYONValue", "BooleanValue", "EnumerationValue",
"NumberValue", "StringValue",
Expand Down Expand Up @@ -49,7 +50,6 @@ def describe(self):

class PYONValue(_SimpleArgProcessor):
"""An argument that can be any PYON-serializable value."""

def __init__(self, default=NoDefault):
# Override the _SimpleArgProcessor init, as list defaults are valid
# PYON values
Expand All @@ -68,7 +68,6 @@ def describe(self):

class BooleanValue(_SimpleArgProcessor):
"""A boolean argument."""

def process(self, x):
if type(x) != bool:
raise ValueError("Invalid BooleanValue value")
Expand All @@ -82,7 +81,6 @@ class EnumerationValue(_SimpleArgProcessor):
:param choices: A list of strings representing the possible values of the
argument.
"""

def __init__(self, choices, default=NoDefault):
self.choices = choices
super().__init__(default)
Expand Down Expand Up @@ -143,7 +141,7 @@ def __init__(self, default=NoDefault, unit="", scale=None,
raise KeyError("Unit {} is unknown, you must specify "
"the scale manually".format(unit))
if step is None:
step = scale / 10.0
step = scale/10.0
self.unit = unit
self.scale = scale
self.step = step
Expand Down Expand Up @@ -214,7 +212,6 @@ def get(self, key, processor, group, tooltip):
def check_unprocessed_arguments(self):
pass


class ProcessArgumentManager:
def __init__(self, unprocessed_arguments):
self.unprocessed_arguments = unprocessed_arguments
Expand All @@ -229,17 +226,15 @@ def get(self, key, processor, group, tooltip):
return r

def check_unprocessed_arguments(self):
unprocessed = set(self.unprocessed_arguments.keys()) - \
unprocessed = set(self.unprocessed_arguments.keys()) -\
self._processed_arguments
if unprocessed:
raise AttributeError("Invalid argument(s): " +
", ".join(unprocessed))


class HasEnvironment:
"""Provides methods to manage the environment of an experiment (arguments,
devices, datasets)."""

def __init__(self, managers_or_parent, *args, **kwargs):
self.children = []
if isinstance(managers_or_parent, tuple):
Expand Down Expand Up @@ -395,7 +390,7 @@ def set_dataset_metadata(self, key, metadata_key, metadata_value):
The metadata is saved as HDF5 attributes if there was a call to
``set_dataset(..., archive=True)`` with the same key.
:param key: Already existing key in the dataset, which will be the metadata attached to.
:param key: An already existing key in the dataset, to which you want to attach the metadata.
If absent, KeyError will be raised.
:param metadata_key: Key of the metadata, of type string. If it already exists, the metadata is rewritten.
:param metadata_value: Value of any valid HDF5 datatype, to be attached to metadata_key.
Expand Down Expand Up @@ -456,7 +451,6 @@ class Experiment:
Deriving from this class enables automatic experiment discovery in
Python modules.
"""

def prepare(self):
"""Entry point for pre-computing data necessary for running the
experiment.
Expand Down Expand Up @@ -502,7 +496,6 @@ class EnvExperiment(Experiment, HasEnvironment):
:class:`~artiq.language.environment.HasEnvironment` environment manager.
Most experiments should derive from this class."""

def prepare(self):
"""This default prepare method calls :meth:`~artiq.language.environment.Experiment.prepare`
for all children, in the order of registration, if the child has a
Expand All @@ -513,9 +506,9 @@ def prepare(self):
def is_experiment(o):
"""Checks if a Python object is a top-level experiment class."""
return (isclass(o)
and issubclass(o, Experiment)
and o is not Experiment
and o is not EnvExperiment)
and issubclass(o, Experiment)
and o is not Experiment
and o is not EnvExperiment)


def is_public_experiment(o):
Expand Down
14 changes: 5 additions & 9 deletions artiq/master/worker_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from sipyco.sync_struct import Notifier
from sipyco.pc_rpc import AutoTarget, Client, BestEffortClient


logger = logging.getLogger(__name__)


Expand Down Expand Up @@ -55,7 +56,6 @@ class DeviceError(Exception):
class DeviceManager:
"""Handles creation and destruction of local device drivers and controller
RPC clients."""

def __init__(self, ddb, virtual_devices=dict()):
self.ddb = ddb
self.virtual_devices = virtual_devices
Expand Down Expand Up @@ -155,7 +155,7 @@ def append_to(self, key, value):
def get(self, key, archive=False):
if key in self.local:
return self.local[key]

data = self.ddb.get(key)
if archive:
if key in self.archive:
Expand All @@ -165,8 +165,6 @@ def get(self, key, archive=False):
return data

def set_metadata(self, key, metadata_key, metadata_value):
if not (isinstance(metadata_key, str) and isinstance(key, str)):
raise TypeError("both `key` and `metadata_key` should be of type `str`")
if key not in self.local:
raise KeyError(f"Key '{key}' not found in dataset.")
if key not in self.hdf5_attributes:
Expand All @@ -185,11 +183,9 @@ def write_hdf5(self, f):
def write_hdf5_attributes(self, f):
datasets = f["datasets"]
for k, attrs in self.hdf5_attributes.items():
if k in datasets:
for attr_k, attr_v in attrs.items():
datasets[k].attrs[attr_k] = attr_v
else:
raise KeyError(f"Key '{k}' not found in `datasets` group.")
assert k in datasets
for attr_k, attr_v in attrs.items():
datasets[k].attrs[attr_k] = attr_v


def _write(group, k, v):
Expand Down
9 changes: 5 additions & 4 deletions artiq/test/test_hdf5_attributes.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ class HDF5Attributes(EnvExperiment):
"""Archive data to HDF5 with attributes"""

def run(self):
# Attach attributes to the HDF5 group `datasets`
# The key should exist in result HDF5 file.
# Attach attributes metadata to the HDF5 key
# The key should exist in the resulting HDF5 file (archive=True).
self.set_dataset("dummy", np.full(20, np.nan), broadcast=True, archive=True)
self.set_dataset_metadata("dummy", "k1", "v1")
self.set_dataset_metadata("dummy", "k2", "v2")
Expand All @@ -39,10 +39,11 @@ def test_dataset_metadata(self):
self.assertTrue(np.all((self.datasets["dummy"], np.full(20, np.nan))))

def test_write_none(self):
with self.assertRaises(TypeError):
with self.assertRaises(KeyError):
self.exp.set_dataset_metadata(None, "test", "none")
self.exp.set_dataset_metadata("dummy", None, "none")
with self.assertRaises(TypeError):
self.exp.set_dataset_metadata("dummy", None, "none")
self.dump()

def test_write_absent(self):
with self.assertRaises(KeyError):
Expand Down

0 comments on commit 73e07e9

Please sign in to comment.