diff --git a/README.md b/README.md
index 72e1afe8..7e685abf 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,5 @@
+[![Downloads](https://static.pepy.tech/badge/dicee)](https://pepy.tech/project/dicee)
+[![Downloads](https://img.shields.io/pypi/dm/dicee)](https://pypi.org/project/dicee/)
 [![Coverage](https://img.shields.io/badge/coverage-54%25-green)](https://dice-group.github.io/dice-embeddings/usage/main.html#coverage-report)
 [![Pypi](https://img.shields.io/badge/pypi-0.1.4-blue)](https://pypi.org/project/dicee/0.1.4/)
 [![Docs](https://img.shields.io/badge/documentation-0.1.4-yellow)](https://dice-group.github.io/dice-embeddings/index.html)
diff --git a/dicee/trainer/model_parallelism.py b/dicee/trainer/model_parallelism.py
index b0cb758d..8f2eba9b 100644
--- a/dicee/trainer/model_parallelism.py
+++ b/dicee/trainer/model_parallelism.py
@@ -87,7 +87,7 @@ def increase_batch_size_until_cuda_out_of_memory(ensemble_model, train_loader, b
             return batch_sizes_and_mem_usages,True

         except torch.OutOfMemoryError as e:
-            print(f"torch.OutOfMemoryError caught! {e}")
+            print(f"torch.OutOfMemoryError caught! {e}\n\n")
             return batch_sizes_and_mem_usages, False

     history_batch_sizes_and_mem_usages=[]
diff --git a/examples/multi_hop_query_answering/benchmarking.py b/examples/multi_hop_query_answering/benchmarking.py
index 25286986..9987c0cb 100644
--- a/examples/multi_hop_query_answering/benchmarking.py
+++ b/examples/multi_hop_query_answering/benchmarking.py
@@ -31,7 +31,7 @@
     args = Namespace()
     args.model = kge_name
     args.scoring_technique = "KvsAll"
-    args.path_dataset_folder = "KGs/UMLS"
+    args.dataset_dir = "KGs/UMLS"
     args.num_epochs = 20
     args.batch_size = 1024
     args.lr = 0.1