From 91c5ab57775aeabd7a8ab3d742b85e14995f1c9a Mon Sep 17 00:00:00 2001 From: Denis Barakhtanov Date: Tue, 7 Jan 2025 17:12:06 +1100 Subject: [PATCH] Linter fixes Ave linters ! Signed-off-by: Denis Barakhtanov --- src/client/pydaos/torch/Readme.md | 2 +- src/client/pydaos/torch/__init__.py | 6 ++-- src/client/pydaos/torch/torch_api.py | 7 ++-- src/client/pydaos/torch/torch_shim.c | 6 ++-- src/tests/ftest/directory_tree.py | 22 ++++++------ src/tests/ftest/pytorch/checkpoint.py | 3 +- src/tests/ftest/pytorch/checkpoint.yaml | 32 ++++++++--------- src/tests/ftest/pytorch/map_dataset.py | 10 +++--- src/tests/ftest/pytorch/map_dataset.yaml | 46 ++++++++++++------------ utils/cq/words.dict | 1 + 10 files changed, 67 insertions(+), 68 deletions(-) diff --git a/src/client/pydaos/torch/Readme.md b/src/client/pydaos/torch/Readme.md index 7012e4a324f..941dde4f745 100644 --- a/src/client/pydaos/torch/Readme.md +++ b/src/client/pydaos/torch/Readme.md @@ -66,7 +66,7 @@ plt.show() ### Checkpoint interface -Torch framwork provides a way to save and load model's checkpoints: `torch.save` and `torch.load` functions are used to save and load the model state dictionary. +Torch framework provides a way to save and load model's checkpoints: `torch.save` and `torch.load` functions are used to save and load the model state dictionary. The `torch.save` function expects a state dictionary object and a file like object `Union[str, PathLike, BinaryIO, IO[bytes]]`. To implement such interface, `pydaos.torch.WriteBuffer` class is introduced, which is a wrapper around `io.BufferedIOBase` object, behaving like a writable stream. It accomulates the data in the buffer and writes it to the DAOS container when the close method is called. diff --git a/src/client/pydaos/torch/__init__.py b/src/client/pydaos/torch/__init__.py index eda9667783f..5899ee70a69 100644 --- a/src/client/pydaos/torch/__init__.py +++ b/src/client/pydaos/torch/__init__.py @@ -1,6 +1,6 @@ -# (C) Copyright 2024 Intel Corporation. -# (C) Copyright 2024 Google LLC -# (C) Copyright 2024 Enakta Labs Ltd +# (C) Copyright 2024-2025 Intel Corporation. +# (C) Copyright 2024-2025 Google LLC +# (C) Copyright 2024-2025 Enakta Labs Ltd # # SPDX-License-Identifier: BSD-2-Clause-Patent # diff --git a/src/client/pydaos/torch/torch_api.py b/src/client/pydaos/torch/torch_api.py index b9e54b7c4e9..192d069bc5c 100644 --- a/src/client/pydaos/torch/torch_api.py +++ b/src/client/pydaos/torch/torch_api.py @@ -1,6 +1,6 @@ # -# (C) Copyright 2024 Google LLC -# (C) Copyright 2024 Enakta Labs Ltd +# (C) Copyright 2024-2025 Google LLC +# (C) Copyright 2024-2025 Enakta Labs Ltd # # SPDX-License-Identifier: BSD-2-Clause-Patent # @@ -307,16 +307,13 @@ def closed(self): """Return True if the file is closed.""" return self._closed - @property def writable(self): """Return True if the file is writable.""" return True - @property def readable(self): return False - @property def seekable(self): """Return True if the file is seekable.""" return False diff --git a/src/client/pydaos/torch/torch_shim.c b/src/client/pydaos/torch/torch_shim.c index bb2907f78e9..7456c074a13 100644 --- a/src/client/pydaos/torch/torch_shim.c +++ b/src/client/pydaos/torch/torch_shim.c @@ -1,7 +1,7 @@ /** - * (C) Copyright 2019-2024 Intel Corporation. - * (C) Copyright 2024 Google LLC - * (C) Copyright 2024 Enakta Labs Ltd + * (C) Copyright 2019-2025 Intel Corporation. 
+ * (C) Copyright 2024-2025 Google LLC + * (C) Copyright 2024-2025 Enakta Labs Ltd * * SPDX-License-Identifier: BSD-2-Clause-Patent */ diff --git a/src/tests/ftest/directory_tree.py b/src/tests/ftest/directory_tree.py index c39cbda6f1e..fae2064682c 100644 --- a/src/tests/ftest/directory_tree.py +++ b/src/tests/ftest/directory_tree.py @@ -112,9 +112,11 @@ def get_probe(self): needle_name = os.path.basename(needle_path) return needle_name, needle_path - def set_file_size(self, min=0, max=0): - self._file_size_min = min - self._file_size_max = max + def set_file_size(self, fmin=0, fmax=0): + """ Set the minimum and maximum file size """ + + self._file_size_min = fmin + self._file_size_max = fmax def _create_dir_tree(self, current_path, current_height): """Create the actual directory tree using depth-first search approach. @@ -130,7 +132,7 @@ def _create_dir_tree(self, current_path, current_height): # create files for _ in range(self._files_per_node): - self._mktemp_file(dir=current_path, suffix=".file") + self._mktemp_file(where=current_path, suffix=".file") # create nested directories for _ in range(self._subdirs_per_node): @@ -149,7 +151,7 @@ def _created_remaining_needles(self): for count in range(self._needles_count): new_path = os.path.dirname(random.choice(self._needles_paths)) # nosec suffix = f"_{count:05d}.needle" - self._mktemp_file(dir=new_path, prefix=self._needles_prefix, suffix=suffix) + self._mktemp_file(where=new_path, prefix=self._needles_prefix, suffix=suffix) def _create_needle(self, current_path, current_height): """Create a *.needle file if we reach the bottom of the tree. @@ -166,17 +168,17 @@ def _create_needle(self, current_path, current_height): self._needles_count -= 1 suffix = "_{:05d}.needle".format(self._needles_count) - file_name = self._mktemp_file(dir=current_path, prefix=self._needles_prefix, suffix=suffix) + file_name = self._mktemp_file(where=current_path, prefix=self._needles_prefix, suffix=suffix) self._needles_paths.append(file_name) - def _mktemp_file(self, dir=None, prefix=None, suffix=None): + def _mktemp_file(self, where=None, prefix=None, suffix=None): """Create a temporary file. If the file size is 0, the file will be empty. If the file size is greater than 0, the file will be filled with random data. If min and max file size are different, the file size will be a random between min and max. """ - fd, fname = tempfile.mkstemp(dir=dir, prefix=prefix, suffix=suffix) + fd, fname = tempfile.mkstemp(dir=where, prefix=prefix, suffix=suffix) if self._file_size_min == 0: os.close(fd) return fname @@ -190,8 +192,8 @@ def _mktemp_file(self, dir=None, prefix=None, suffix=None): return fname - -def _populate_dir_tree(path, height, subdirs_per_node, files_per_node, needles, prefix, file_size_min, file_size_max): +def _populate_dir_tree(path, height, subdirs_per_node, files_per_node, needles, prefix, + file_size_min, file_size_max): """Create a directory tree and its needle files. Args: diff --git a/src/tests/ftest/pytorch/checkpoint.py b/src/tests/ftest/pytorch/checkpoint.py index 2e45e384b26..992fe9bcb6b 100644 --- a/src/tests/ftest/pytorch/checkpoint.py +++ b/src/tests/ftest/pytorch/checkpoint.py @@ -1,4 +1,5 @@ """ + (C) Copyright 2025 Intel Corporation. (C) Copyright 2025 Google LLC SPDX-License-Identifier: BSD-2-Clause-Patent @@ -19,7 +20,7 @@ class PytorchCheckpointTest(TestWithServers): def test_checkpoint(self): """Test Pytorch Checkpoint interface - Test Description: Ensure that wirting and reading a checkpoint works as expected. 
+ Test Description: Ensure that writing and reading a checkpoint works as expected. :avocado: tags=all,full_regression :avocado: tags=vm diff --git a/src/tests/ftest/pytorch/checkpoint.yaml b/src/tests/ftest/pytorch/checkpoint.yaml index 83b0275c286..8e287e409a5 100644 --- a/src/tests/ftest/pytorch/checkpoint.yaml +++ b/src/tests/ftest/pytorch/checkpoint.yaml @@ -1,23 +1,23 @@ hosts: - test_servers: 1 - test_clients: 1 + test_servers: 1 + test_clients: 1 server_config: - name: daos_server - engines_per_host: 1 - engines: + name: daos_server + engines_per_host: 1 + engines: + 0: + targets: 4 + nr_xs_helpers: 0 + storage: 0: - targets: 4 - nr_xs_helpers: 0 - storage: - 0: - class: ram - scm_mount: /mnt/daos - system_ram_reserved: 1 + class: ram + scm_mount: /mnt/daos + system_ram_reserved: 1 pool: - size: 1G + size: 1G container: - type: POSIX - control_method: daos + type: POSIX + control_method: daos checkpoint: - writes: 100 + writes: 100 diff --git a/src/tests/ftest/pytorch/map_dataset.py b/src/tests/ftest/pytorch/map_dataset.py index 174878c2871..d4523025dd3 100644 --- a/src/tests/ftest/pytorch/map_dataset.py +++ b/src/tests/ftest/pytorch/map_dataset.py @@ -1,4 +1,5 @@ """ + (C) Copyright 2025 Intel Corporation. (C) Copyright 2025 Google LLC SPDX-License-Identifier: BSD-2-Clause-Patent @@ -78,7 +79,7 @@ def test_map_style_dataset(self): def test_dataloader(self): """Test Map Style Dataset with DataLoader. - Test Description: Ensure that the dataloader can read all the samples that were seeded. + Test Description: Ensure that the DataLoader can read all the samples that were seeded. :avocado: tags=all,full_regression :avocado: tags=vm @@ -96,7 +97,7 @@ def test_dataloader(self): subdirs = self.params.get("subdirs", "/run/dataloader/*") files_per_node = self.params.get("files_per_node", "/run/dataloader/*") - # Dataloader requires that samples are of the same size + # DataLoader requires that samples are of the same size file_min_size = self.params.get("file_min_size", "/run/dataloader/*", 4096) file_max_size = self.params.get("file_max_size", "/run/dataloader/*", 4096) @@ -148,10 +149,7 @@ def _test_dataloader(self, pool, container, hashes, batch_size, processes): if hashes != actual: self.fail( - f"dataloader with nproc={processes} and bs={batch_size} did not fetch all samples") - else: - self.log.info( - f"dataloader with nproc={processes} and bs={batch_size} fetched all samples") + f"DataLoader with nproc={processes} and bs={batch_size} did not fetch all samples") def _create_test_files(self, path, height, subdirs, files_per_node, min_size, max_size): """Create a directory tree""" diff --git a/src/tests/ftest/pytorch/map_dataset.yaml b/src/tests/ftest/pytorch/map_dataset.yaml index f909a8e182f..31c16c66164 100644 --- a/src/tests/ftest/pytorch/map_dataset.yaml +++ b/src/tests/ftest/pytorch/map_dataset.yaml @@ -1,32 +1,32 @@ hosts: - test_servers: 1 - test_clients: 1 + test_servers: 1 + test_clients: 1 server_config: - name: daos_server - engines_per_host: 1 - engines: + name: daos_server + engines_per_host: 1 + engines: + 0: + targets: 4 + nr_xs_helpers: 0 + storage: 0: - targets: 4 - nr_xs_helpers: 0 - storage: - 0: - class: ram - scm_mount: /mnt/daos - system_ram_reserved: 1 + class: ram + scm_mount: /mnt/daos + system_ram_reserved: 1 pool: - size: 1G + size: 1G container: - type: POSIX - control_method: daos + type: POSIX + control_method: daos map_style_dataset: - tree_height: 4 - subdirs: 3 - files_per_node: 5 + tree_height: 4 + subdirs: 3 + files_per_node: 5 dataloader: - 
tree_height: 3 - subdirs: 3 - files_per_node: 8 - processes: [0, 1, 2, 3, 4, 8] - batch_size: [2, 4, 8, 16] + tree_height: 3 + subdirs: 3 + files_per_node: 8 + processes: [0, 1, 2, 3, 4, 8] + batch_size: [2, 4, 8, 16] diff --git a/utils/cq/words.dict b/utils/cq/words.dict index 102ca21a882..4a8d20041b8 100644 --- a/utils/cq/words.dict +++ b/utils/cq/words.dict @@ -409,6 +409,7 @@ scancel scm scons scontrol +seekable sharedctypes shlex simul
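
For context on the checkpoint interface described in the Readme.md hunk above, here is a minimal sketch of the file-like save/load flow it refers to. Only `torch.save`/`torch.load` and the standard `io` module are assumed; `io.BytesIO` stands in for `pydaos.torch.WriteBuffer`, whose constructor is not shown in this patch.

```python
# Minimal sketch of the file-like checkpoint flow the Readme hunk describes.
# io.BytesIO is used as a stand-in for pydaos.torch.WriteBuffer, which per the
# Readme accumulates writes and flushes them to the DAOS container on close().
import io

import torch

model = torch.nn.Linear(4, 2)

# torch.save only needs a writable file-like object.
buffer = io.BytesIO()
torch.save(model.state_dict(), buffer)

# torch.load needs a readable file-like object; rewind the in-memory buffer
# before reading the state dictionary back.
buffer.seek(0)
state = torch.load(buffer)
model.load_state_dict(state)
```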
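
The `torch_api.py` hunk removes `@property` from `writable`, `readable` and `seekable`. A small illustrative sketch of the convention this restores, assuming only the standard `io` module: `io.IOBase` (which `io.BufferedIOBase` extends) defines these as plain methods, and wrappers such as `io.BufferedWriter` invoke them as method calls rather than attribute lookups.

```python
# io.IOBase declares readable()/writable()/seekable() as methods, not
# properties, so stream subclasses override them as plain methods.
import io


class NullRawWriter(io.RawIOBase):
    """Discards everything written to it; illustrative only."""

    def writable(self):  # a method, matching io.IOBase, not a property
        return True

    def write(self, b):
        return len(b)


# io.BufferedWriter calls raw.writable() during construction, which only
# works as intended when writable is a method.
with io.BufferedWriter(NullRawWriter()) as out:
    out.write(b"checkpoint bytes")
```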