diff --git a/artiq/language/environment.py b/artiq/language/environment.py
index bb3aa7bbbc..5906d7bb88 100644
--- a/artiq/language/environment.py
+++ b/artiq/language/environment.py
@@ -6,6 +6,7 @@
 from artiq.language import units
 from artiq.language.core import rpc
 
+
 __all__ = ["NoDefault", "DefaultMissing",
            "PYONValue", "BooleanValue", "EnumerationValue",
            "NumberValue", "StringValue",
@@ -49,7 +50,6 @@ def describe(self):
 
 class PYONValue(_SimpleArgProcessor):
     """An argument that can be any PYON-serializable value."""
-
     def __init__(self, default=NoDefault):
         # Override the _SimpleArgProcessor init, as list defaults are valid
         # PYON values
@@ -68,7 +68,6 @@ def describe(self):
 
 class BooleanValue(_SimpleArgProcessor):
     """A boolean argument."""
-
     def process(self, x):
         if type(x) != bool:
             raise ValueError("Invalid BooleanValue value")
@@ -82,7 +81,6 @@ class EnumerationValue(_SimpleArgProcessor):
     :param choices: A list of string representing the possible values of the
         argument.
     """
-
     def __init__(self, choices, default=NoDefault):
         self.choices = choices
         super().__init__(default)
@@ -143,7 +141,7 @@ def __init__(self, default=NoDefault, unit="", scale=None,
                 raise KeyError("Unit {} is unknown, you must specify "
                                "the scale manually".format(unit))
         if step is None:
-            step = scale / 10.0
+            step = scale/10.0
         self.unit = unit
         self.scale = scale
         self.step = step
@@ -214,7 +212,6 @@ def get(self, key, processor, group, tooltip):
     def check_unprocessed_arguments(self):
         pass
 
-
 class ProcessArgumentManager:
     def __init__(self, unprocessed_arguments):
         self.unprocessed_arguments = unprocessed_arguments
@@ -229,17 +226,15 @@ def get(self, key, processor, group, tooltip):
         return r
 
     def check_unprocessed_arguments(self):
-        unprocessed = set(self.unprocessed_arguments.keys()) - \
+        unprocessed = set(self.unprocessed_arguments.keys()) -\
                       self._processed_arguments
         if unprocessed:
             raise AttributeError("Invalid argument(s): " +
                                  ", ".join(unprocessed))
 
-
 class HasEnvironment:
     """Provides methods to manage the environment of an experiment (arguments,
     devices, datasets)."""
-
     def __init__(self, managers_or_parent, *args, **kwargs):
         self.children = []
         if isinstance(managers_or_parent, tuple):
@@ -395,7 +390,7 @@ def set_dataset_metadata(self, key, metadata_key, metadata_value):
         The metadata is saved as HDF5 attributes if there was a call to
         ``set_dataset(..., archive=True)`` with the same key.
 
-        :param key: Already existing key in the dataset, which will be the metadata attached to.
+        :param key: An already existing key in the dataset, to which you want to attach the metadata.
             If absent, KeyError will be raised.
         :param metadata_key: Key of the metadata of type string. If already exists, rewrites the metadata.
         :param metadata_value: Value to be attached to metadata_key of any valid HDF5 datatype.
@@ -456,7 +451,6 @@ class Experiment:
     Deriving from this class enables automatic experiment discovery in
     Python modules.
     """
-
     def prepare(self):
         """Entry point for pre-computing data necessary for running the
         experiment.
@@ -502,7 +496,6 @@ class EnvExperiment(Experiment, HasEnvironment):
     :class:`~artiq.language.environment.HasEnvironment` environment manager.
 
     Most experiments should derive from this class."""
-
     def prepare(self):
         """This default prepare method calls :meth:`~artiq.language.environment.Experiment.prepare`
         for all children, in the order of registration, if the child has a
@@ -513,9 +506,9 @@ def prepare(self):
 def is_experiment(o):
     """Checks if a Python object is a top-level experiment class."""
     return (isclass(o)
-            and issubclass(o, Experiment)
-            and o is not Experiment
-            and o is not EnvExperiment)
+        and issubclass(o, Experiment)
+        and o is not Experiment
+        and o is not EnvExperiment)
 
 
 def is_public_experiment(o):
diff --git a/artiq/master/worker_db.py b/artiq/master/worker_db.py
index 18b6469232..8b827fed00 100644
--- a/artiq/master/worker_db.py
+++ b/artiq/master/worker_db.py
@@ -11,6 +11,7 @@
 from sipyco.sync_struct import Notifier
 from sipyco.pc_rpc import AutoTarget, Client, BestEffortClient
 
+
 logger = logging.getLogger(__name__)
 
 
@@ -55,7 +56,6 @@ class DeviceError(Exception):
 class DeviceManager:
     """Handles creation and destruction of local device drivers and
     controller RPC clients."""
-
     def __init__(self, ddb, virtual_devices=dict()):
         self.ddb = ddb
         self.virtual_devices = virtual_devices
@@ -155,7 +155,7 @@ def append_to(self, key, value):
     def get(self, key, archive=False):
         if key in self.local:
             return self.local[key]
-        
+
         data = self.ddb.get(key)
         if archive:
             if key in self.archive:
@@ -165,8 +165,6 @@ def get(self, key, archive=False):
         return data
 
     def set_metadata(self, key, metadata_key, metadata_value):
-        if not (isinstance(metadata_key, str) and isinstance(key, str)):
-            raise TypeError("both `key` and `metadata_key` should be of type `str`")
         if key not in self.local:
             raise KeyError(f"Key '{key}' not found in dataset.")
         if key not in self.hdf5_attributes:
@@ -185,11 +183,9 @@ def write_hdf5(self, f):
     def write_hdf5_attributes(self, f):
         datasets = f["datasets"]
         for k, attrs in self.hdf5_attributes.items():
-            if k in datasets:
-                for attr_k, attr_v in attrs.items():
-                    datasets[k].attrs[attr_k] = attr_v
-            else:
-                raise KeyError(f"Key '{k}' not found in `datasets` group.")
+            assert k in datasets
+            for attr_k, attr_v in attrs.items():
+                datasets[k].attrs[attr_k] = attr_v
 
 
 def _write(group, k, v):
diff --git a/artiq/test/test_hdf5_attributes.py b/artiq/test/test_hdf5_attributes.py
index ff20dad513..f7f65ff104 100644
--- a/artiq/test/test_hdf5_attributes.py
+++ b/artiq/test/test_hdf5_attributes.py
@@ -11,8 +11,8 @@ class HDF5Attributes(EnvExperiment):
     """Archive data to HDF5 with attributes"""
 
     def run(self):
-        # Attach attributes to the HDF5 group `datasets`
-        # The key should exist in result HDF5 file.
+        # Attach attributes metadata to the HDF5 key
+        # The key should exist in the resulting HDF5 file (archive=True).
         self.set_dataset("dummy", np.full(20, np.nan), broadcast=True, archive=True)
         self.set_dataset_metadata("dummy", "k1", "v1")
         self.set_dataset_metadata("dummy", "k2", "v2")
@@ -39,10 +39,11 @@ def test_dataset_metadata(self):
         self.assertTrue(np.all((self.datasets["dummy"], np.full(20, np.nan))))
 
     def test_write_none(self):
-        with self.assertRaises(TypeError):
+        with self.assertRaises(KeyError):
             self.exp.set_dataset_metadata(None, "test", "none")
+            self.exp.set_dataset_metadata("dummy", None, "none")
         with self.assertRaises(TypeError):
-            self.exp.set_dataset_metadata("dummy", None, "none")
+            self.dump()
 
     def test_write_absent(self):
         with self.assertRaises(KeyError):