Skip to content

Commit

Permalink
Cleanup grammar, styles and code
Browse files Browse the repository at this point in the history
Signed-off-by: Egor Savkin <[email protected]>
  • Loading branch information
thomasfire committed Dec 2, 2022
1 parent e8ff6f8 commit 1685f50
Show file tree
Hide file tree
Showing 4 changed files with 24 additions and 38 deletions.
29 changes: 11 additions & 18 deletions artiq/language/environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from artiq.language import units
from artiq.language.core import rpc


__all__ = ["NoDefault", "DefaultMissing",
"PYONValue", "BooleanValue", "EnumerationValue",
"NumberValue", "StringValue",
Expand Down Expand Up @@ -49,7 +50,6 @@ def describe(self):

class PYONValue(_SimpleArgProcessor):
"""An argument that can be any PYON-serializable value."""

def __init__(self, default=NoDefault):
# Override the _SimpleArgProcessor init, as list defaults are valid
# PYON values
Expand All @@ -68,7 +68,6 @@ def describe(self):

class BooleanValue(_SimpleArgProcessor):
"""A boolean argument."""

def process(self, x):
if type(x) != bool:
raise ValueError("Invalid BooleanValue value")
Expand All @@ -82,7 +81,6 @@ class EnumerationValue(_SimpleArgProcessor):
:param choices: A list of string representing the possible values of the
argument.
"""

def __init__(self, choices, default=NoDefault):
self.choices = choices
super().__init__(default)
Expand Down Expand Up @@ -143,7 +141,7 @@ def __init__(self, default=NoDefault, unit="", scale=None,
raise KeyError("Unit {} is unknown, you must specify "
"the scale manually".format(unit))
if step is None:
step = scale / 10.0
step = scale/10.0
self.unit = unit
self.scale = scale
self.step = step
Expand Down Expand Up @@ -212,7 +210,6 @@ def get(self, key, processor, group, tooltip):
return None



class ProcessArgumentManager:
def __init__(self, unprocessed_arguments):
self.unprocessed_arguments = unprocessed_arguments
Expand All @@ -227,17 +224,15 @@ def get(self, key, processor, group, tooltip):
return r

def check_unprocessed_arguments(self):
unprocessed = set(self.unprocessed_arguments.keys()) - \
unprocessed = set(self.unprocessed_arguments.keys()) -\
self._processed_arguments
if unprocessed:
raise AttributeError("Supplied argument(s) not queried in experiment: " +
", ".join(unprocessed))


class HasEnvironment:
"""Provides methods to manage the environment of an experiment (arguments,
devices, datasets)."""

def __init__(self, managers_or_parent, *args, **kwargs):
self.children = []
if isinstance(managers_or_parent, tuple):
Expand Down Expand Up @@ -386,16 +381,16 @@ def append_to_dataset(self, key, value):

@rpc(flags={"async"})
def set_dataset_metadata(self, key, metadata_key, metadata_value):
"""Attach metadata to the key in the dataset.
"""Attach metadata to the dataset.
The metadata is saved as HDF5 attributes if there was a call to
``set_dataset(..., archive=True)`` with the same key.
:param key: Already existing key in the dataset, to which the metadata will be attached.
:param key: The already existing dataset, to which you want to attach the metadata.
If absent, KeyError will be raised.
:param metadata_key: Key of the metadata, of type string. If it already exists, the metadata is rewritten.
:param metadata_value: Value to be attached to metadata_key of any valid HDF5 datatype.
See https://docs.hdfgroup.org/hdf5/develop/group___h5_t.html for additional information.
:param metadata_key: The metadata key, of type string. If it already exists, the metadata is rewritten.
:param metadata_value: Value to be attached to ``metadata_key``. Can be any valid HDF5 datatype.
See HDF5 documentation for additional information.
"""
self.__dataset_mgr.set_metadata(key, metadata_key, metadata_value)

Expand Down Expand Up @@ -452,7 +447,6 @@ class Experiment:
Deriving from this class enables automatic experiment discovery in
Python modules.
"""

def prepare(self):
"""Entry point for pre-computing data necessary for running the
experiment.
Expand Down Expand Up @@ -498,7 +492,6 @@ class EnvExperiment(Experiment, HasEnvironment):
:class:`~artiq.language.environment.HasEnvironment` environment manager.
Most experiments should derive from this class."""

def prepare(self):
"""This default prepare method calls :meth:`~artiq.language.environment.Experiment.prepare`
for all children, in the order of registration, if the child has a
Expand All @@ -509,9 +502,9 @@ def prepare(self):
def is_experiment(o):
"""Checks if a Python object is a top-level experiment class."""
return (isclass(o)
and issubclass(o, Experiment)
and o is not Experiment
and o is not EnvExperiment)
and issubclass(o, Experiment)
and o is not Experiment
and o is not EnvExperiment)


def is_public_experiment(o):
Expand Down
22 changes: 8 additions & 14 deletions artiq/master/worker_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from sipyco.sync_struct import Notifier
from sipyco.pc_rpc import AutoTarget, Client, BestEffortClient


logger = logging.getLogger(__name__)


Expand Down Expand Up @@ -55,7 +56,6 @@ class DeviceError(Exception):
class DeviceManager:
"""Handles creation and destruction of local device drivers and controller
RPC clients."""

def __init__(self, ddb, virtual_devices=dict()):
self.ddb = ddb
self.virtual_devices = virtual_devices
Expand Down Expand Up @@ -155,7 +155,7 @@ def append_to(self, key, value):
def get(self, key, archive=False):
if key in self.local:
return self.local[key]

data = self.ddb.get(key)
if archive:
if key in self.archive:
Expand All @@ -165,10 +165,8 @@ def get(self, key, archive=False):
return data

def set_metadata(self, key, metadata_key, metadata_value):
if not (isinstance(metadata_key, str) and isinstance(key, str)):
raise TypeError("both `key` and `metadata_key` should be of type `str`")
if key not in self.local:
raise KeyError(f"Key '{key}' not found in dataset.")
raise KeyError(f"Dataset '{key}' does not exist.")
if key not in self.hdf5_attributes:
self.hdf5_attributes[key] = dict()
self.hdf5_attributes[key][metadata_key] = metadata_value
Expand All @@ -178,19 +176,15 @@ def write_hdf5(self, f):
for k, v in self.local.items():
_write(datasets_group, k, v)

for k, attrs in self.hdf5_attributes.items():
assert k in datasets_group
for attr_k, attr_v in attrs.items():
datasets_group[k].attrs[attr_k] = attr_v

archive_group = f.create_group("archive")
for k, v in self.archive.items():
_write(archive_group, k, v)

def write_hdf5_attributes(self, f):
datasets = f["datasets"]
for k, attrs in self.hdf5_attributes.items():
if k in datasets:
for attr_k, attr_v in attrs.items():
datasets[k].attrs[attr_k] = attr_v
else:
raise KeyError(f"Key '{k}' not found in `datasets` group.")


def _write(group, k, v):
# Add context to exception message when the user writes a dataset that is
Expand Down
1 change: 0 additions & 1 deletion artiq/master/worker_impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,6 @@ def write_results():
filename = "{:09}-{}.h5".format(rid, exp.__name__)
with h5py.File(filename, "w") as f:
dataset_mgr.write_hdf5(f)
dataset_mgr.write_hdf5_attributes(f)
f["artiq_version"] = artiq_version
f["rid"] = rid
f["start_time"] = start_time
Expand Down
10 changes: 5 additions & 5 deletions artiq/test/test_hdf5_attributes.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ class HDF5Attributes(EnvExperiment):
"""Archive data to HDF5 with attributes"""

def run(self):
# Attach attributes to the HDF5 group `datasets`
# The key should exist in result HDF5 file.
# Attach attributes metadata to the HDF5 key
# The key should exist in the resulting HDF5 file (archive=True).
self.set_dataset("dummy", np.full(20, np.nan), broadcast=True, archive=True)
self.set_dataset_metadata("dummy", "k1", "v1")
self.set_dataset_metadata("dummy", "k2", "v2")
Expand All @@ -28,7 +28,6 @@ def dump(self):
self.bio = io.BytesIO()
with h5py.File(self.bio, "w") as f:
self.dataset_mgr.write_hdf5(f)
self.dataset_mgr.write_hdf5_attributes(f)

self.bio.seek(0)
self.h5file = h5py.File(self.bio, "r")
Expand All @@ -39,10 +38,11 @@ def test_dataset_metadata(self):
self.assertTrue(np.all((self.datasets["dummy"], np.full(20, np.nan))))

def test_write_none(self):
with self.assertRaises(TypeError):
with self.assertRaises(KeyError):
self.exp.set_dataset_metadata(None, "test", "none")
self.exp.set_dataset_metadata("dummy", None, "none")
with self.assertRaises(TypeError):
self.exp.set_dataset_metadata("dummy", None, "none")
self.dump()

def test_write_absent(self):
with self.assertRaises(KeyError):
Expand Down

0 comments on commit 1685f50

Please sign in to comment.