
Commit

'Refactored by Sourcery'
Sourcery AI committed Oct 25, 2023
1 parent 894e6a6 commit f44f4df
Showing 6 changed files with 35 additions and 74 deletions.
20 changes: 8 additions & 12 deletions nxdrive/client/uploader/__init__.py
@@ -357,11 +357,9 @@ def upload_chunks(self, transfer: Upload, blob: FileBlob, chunked: bool, /) -> N
self.dao.update_upload(transfer)
transfer.is_dirty = False

# Handle status changes every time a chunk is sent
_transfer = self.get_upload(
if _transfer := self.get_upload(

Codecov warning: added line #L360 not covered by tests
doc_pair=transfer.doc_pair, path=transfer.path
)
if _transfer:
):
self._handle_transfer_status(_transfer)
else:
uploader.upload()
@@ -451,12 +449,11 @@ def link_blob_to_doc(
kwargs["headers"] = headers
try:
doc_type = kwargs.get("doc_type", "")
if transfer.is_direct_transfer and doc_type and doc_type != "":
res = self._transfer_docType_file(transfer, headers, doc_type)
else:
res = self._transfer_autoType_file(command, blob, kwargs)

return res
return (
self._transfer_docType_file(transfer, headers, doc_type)
if transfer.is_direct_transfer and doc_type and doc_type != ""
else self._transfer_autoType_file(command, blob, kwargs)
)
except Exception as exc:
err = f"Error while linking blob to doc: {exc!r}"
log.warning(err)
@@ -503,11 +500,10 @@ def _transfer_docType_file(
data=content,
ssl_verify=self.verification_needed,
)
res = self.remote.fetch(
return self.remote.fetch(

Codecov warning: added line #L503 not covered by tests
f"{self.remote.client.api_path}/path{transfer.remote_parent_path}",
headers=headers,
)
return res

@staticmethod
def _complete_upload(transfer: Upload, blob: FileBlob, /) -> None:
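For reference, a minimal standalone sketch of the two patterns Sourcery applied in this file: the walrus operator folds the assignment into the if, and a conditional expression replaces an if/else whose only job was to pick a return value. Everything below (the dictionary, the simplified function bodies) is an illustrative stand-in, not the real nxdrive API.

from typing import Optional

_uploads = {"doc-1": "upload-state"}

def get_upload(doc_pair: str) -> Optional[str]:
    """Stand-in for BaseUploader.get_upload(): returns None when nothing is pending."""
    return _uploads.get(doc_pair)

def upload_chunks(doc_pair: str) -> str:
    # Before: _transfer = get_upload(...); if _transfer: ...
    # After: the walrus operator binds and tests in one expression.
    if _transfer := get_upload(doc_pair):
        return f"handle status of {_transfer}"
    return "upload next chunk"

def link_blob_to_doc(direct: bool, doc_type: str) -> str:
    # Before: res = a() if cond else b(); return res
    # After: return the conditional expression directly.
    return "docType path" if direct and doc_type else "autoType path"

print(upload_chunks("doc-1"))          # handle status of upload-state
print(upload_chunks("doc-2"))          # upload next chunk
print(link_blob_to_doc(True, "File"))  # docType path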
38 changes: 12 additions & 26 deletions nxdrive/dao/engine.py
Original file line number Diff line number Diff line change
@@ -690,7 +690,7 @@ def release_processor(self, processor_id: int, /) -> bool:
"UPDATE States SET processor = 0 WHERE processor = ?", (processor_id,)
)
log.debug(f"Released processor {processor_id}")
return bool(c.rowcount > 0)
return c.rowcount > 0

def acquire_processor(self, thread_id: int, row_id: int, /) -> bool:
with self.lock:
@@ -702,7 +702,7 @@ def acquire_processor(self, thread_id: int, row_id: int, /) -> bool:
" AND processor IN (0, ?)",
(thread_id, row_id, thread_id),
)
return bool(c.rowcount == 1)
return c.rowcount == 1

def _reinit_states(self, cursor: Cursor, /) -> None:
cursor.execute("DROP TABLE States")
@@ -748,7 +748,7 @@ def delete_remote_state(self, doc_pair: DocPair, /) -> None:
c.execute(f"{update} WHERE id = ?", ("remotely_deleted", doc_pair.id))
if doc_pair.folderish:
c.execute(
update + " " + self._get_recursive_remote_condition(doc_pair),
f"{update} {self._get_recursive_remote_condition(doc_pair)}",
("parent_remotely_deleted",),
)
# Only queue parent
@@ -781,16 +781,7 @@ def insert_local_state(
) -> int:
digest = None
if not info.folderish:
if is_large_file(info.size):
# We can't compute the digest of big files now as it will
# be done later when the entire file is fully copied.
# For instance, on my machine (32GB RAM, 8 cores, Intel NUC)
# it takes 23 minutes for 100 GB and 7 minute for 50 GB.
# This is way too much effort to compute it several times.
digest = UNACCESSIBLE_HASH
else:
digest = info.get_digest()

digest = UNACCESSIBLE_HASH if is_large_file(info.size) else info.get_digest()
with self.lock:
c = self._get_write_connection().cursor()
pair_state = PAIR_STATES[("created", "unknown")]
@@ -1406,7 +1397,7 @@ def remove_state(
condition = self._get_recursive_remote_condition(doc_pair)
else:
condition = self._get_recursive_condition(doc_pair)
c.execute("DELETE FROM States " + condition)
c.execute(f"DELETE FROM States {condition}")

Codecov warning: added line #L1400 not covered by tests

def remove_state_children(
self, doc_pair: DocPair, /, *, remote_recursion: bool = False
@@ -1417,7 +1408,7 @@ def remove_state_children(
condition = self._get_recursive_remote_condition(doc_pair)
else:
condition = self._get_recursive_condition(doc_pair)
c.execute("DELETE FROM States " + condition)
c.execute(f"DELETE FROM States {condition}")

Codecov warning: added line #L1411 not covered by tests

def get_state_from_local(self, path: Path, /) -> Optional[DocPair]:
c = self._get_read_connection().cursor()
@@ -1485,15 +1476,10 @@ def insert_remote_state(
def queue_children(self, row: DocPair, /) -> None:
with self.lock:
c = self._get_write_connection().cursor()
children: List[DocPair] = c.execute(
"SELECT *"
" FROM States"
" WHERE remote_parent_ref = ?"
" OR local_parent_path = ?"
" AND " + self._get_to_sync_condition(),
if children := c.execute(
f"SELECT * FROM States WHERE remote_parent_ref = ? OR local_parent_path = ? AND {self._get_to_sync_condition()}",
(row.remote_ref, row.local_path),
).fetchall()
if children:
).fetchall():
log.info(f"Queuing {len(children)} children of {row}")
for child in children:
self._queue_pair_state(child.id, child.folderish, child.pair_state)
@@ -1663,7 +1649,7 @@ def synchronize_state(
version,
),
)
result = bool(c.rowcount == 1)
result = c.rowcount == 1

# Retry without version for folder
if not result and row.folderish:
@@ -1694,7 +1680,7 @@ def synchronize_state(
row.remote_parent_ref,
),
)
result = bool(c.rowcount == 1)
result = c.rowcount == 1

if not result:
log.debug(f"Was not able to synchronize state: {row!r}")
@@ -1869,7 +1855,7 @@ def is_path_scanned(self, path: str, /) -> bool:
row = c.execute(
"SELECT COUNT(path) FROM RemoteScan WHERE path = ? LIMIT 1", (path,)
).fetchone()
return bool(row[0] > 0)
return row[0] > 0

def is_filter(self, path: str, /) -> bool:
path = self._clean_filter_path(path)
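A hedged sketch of the engine.py simplifications: comparisons returned directly without a bool(...) wrapper, SQL fragments composed with f-strings instead of concatenation, and a fetchall() result bound and tested with the walrus operator. The table and condition below are made up for illustration; interpolating the condition into the SQL is only reasonable because, as in the DAO, the fragment is generated internally while values still go through ? placeholders.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE States (id INTEGER PRIMARY KEY, parent TEXT, processor INTEGER)")
conn.executemany(
    "INSERT INTO States (parent, processor) VALUES (?, ?)",
    [("/root", 0), ("/root", 7), ("/other", 0)],
)

def release_processor(processor_id: int) -> bool:
    c = conn.execute("UPDATE States SET processor = 0 WHERE processor = ?", (processor_id,))
    # rowcount > 0 is already a bool, so no bool(...) wrapper is needed.
    return c.rowcount > 0

def queue_children(parent: str) -> int:
    # Internally generated fragment joined with an f-string; values still use ? placeholders.
    condition = "WHERE parent = ?"
    if children := conn.execute(f"SELECT * FROM States {condition}", (parent,)).fetchall():
        return len(children)
    return 0

print(release_processor(7))     # True
print(queue_children("/root"))  # 2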
10 changes: 3 additions & 7 deletions nxdrive/engine/activity.py
@@ -54,8 +54,7 @@ def get_current_action(*, thread_id: int = None) -> Optional["Action"]:

@staticmethod
def finish_action() -> None:
action = Action.actions.pop(current_thread_id(), None)
if action:
if action := Action.actions.pop(current_thread_id(), None):
action.finish()

def finish(self) -> None:
@@ -69,9 +68,7 @@ def export(self) -> Dict[str, Any]:
}

def __repr__(self) -> str:
if not self.progress:
return str(self.type)
return f"{self.type}({self.progress}%)"
return f"{self.type}({self.progress}%)" if self.progress else str(self.type)


class IdleAction(Action):
@@ -131,8 +128,7 @@ def _connect_reporter(self, reporter: Optional[QApplication], /) -> None:
return

for evt in ("started", "progressing", "done"):
signal = getattr(reporter, f"action_{evt}", None)
if signal:
if signal := getattr(reporter, f"action_{evt}", None):
getattr(self, evt).connect(signal)

@property
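A small sketch of the activity.py changes: dict.pop(key, None) and getattr(obj, name, None) feed straight into a walrus if, and __repr__ collapses to one conditional expression. The class and attribute names below are placeholders, not the real Action/QApplication wiring.

class Action:
    actions: dict = {}

    def __init__(self, type_: str, progress: int = 0) -> None:
        self.type = type_
        self.progress = progress

    def finish(self) -> None:
        print(f"finished {self.type}")

    def __repr__(self) -> str:
        # One conditional expression instead of an early return.
        return f"{self.type}({self.progress}%)" if self.progress else str(self.type)

    @staticmethod
    def finish_action(thread_id: int) -> None:
        # pop() with a default, bound and tested inline with the walrus operator.
        if action := Action.actions.pop(thread_id, None):
            action.finish()

class Reporter:
    def action_started(self, action: Action) -> None:
        print(f"started {action!r}")

def connect(reporter: Reporter) -> None:
    for evt in ("started", "progressing", "done"):
        # getattr() with a default, also tested inline; missing signals are skipped.
        if signal := getattr(reporter, f"action_{evt}", None):
            signal(Action("Upload", 42))

Action.actions[1] = Action("Download")
Action.finish_action(1)  # finished Download
connect(Reporter())      # started Upload(42%)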
36 changes: 10 additions & 26 deletions nxdrive/gui/view.py
@@ -84,10 +84,7 @@ def data(self, index: QModelIndex, role: int, /) -> str:

uid = self.engines_uid[index]
engine = self.application.manager.engines.get(uid)
if not engine:
return ""

return getattr(engine, self.names[role].decode())
return "" if not engine else getattr(engine, self.names[role].decode())

Codecov warning: added line #L87 not covered by tests

@pyqtSlot(int, str, result=str)
def get(self, index: int, role: str = "uid", /) -> str:
@@ -96,10 +93,7 @@ def get(self, index: int, role: str = "uid", /) -> str:

uid = self.engines_uid[index]
engine = self.application.manager.engines.get(uid)
if not engine:
return ""

return getattr(engine, role)
return "" if not engine else getattr(engine, role)

Codecov warning: added line #L96 not covered by tests

def removeRows(
self, row: int, count: int, /, *, parent: QModelIndex = QModelIndex()
@@ -496,23 +490,18 @@ def data(self, index: QModelIndex, role: int, /) -> Any:
elif role == self.CREATED_ON:
label = "STARTED"
args = []
datetime = get_date_from_sqlite(row["created_on"])
if datetime:
if datetime := get_date_from_sqlite(row["created_on"]):

Codecov warning: added line #L493 not covered by tests
label += "_ON"
# As date_time is in UTC
offset = tzlocal().utcoffset(datetime)
if offset:
if offset := tzlocal().utcoffset(datetime):

Codecov warning: added line #L495 not covered by tests
datetime += offset
args.append(Translator.format_datetime(datetime))
return self.tr(label, values=args)
elif role == self.COMPLETED_ON:
label = "COMPLETED" if row["status"].name == "DONE" else "CANCELLED"
args = []
datetime = get_date_from_sqlite(row["completed_on"])
if datetime:
if datetime := get_date_from_sqlite(row["completed_on"]):

Codecov warning: added line #L502 not covered by tests
label += "_ON"
offset = tzlocal().utcoffset(datetime)
if offset:
if offset := tzlocal().utcoffset(datetime):

Codecov warning: added line #L504 not covered by tests
datetime += offset
args.append(Translator.format_datetime(datetime))
return self.tr(label, values=args)
@@ -626,23 +615,18 @@ def data(self, index: QModelIndex, role: int, /) -> Any:
elif role == self.CREATED_ON:
label = "STARTED"
args = []
datetime = get_date_from_sqlite(row["created_on"])
if datetime:
if datetime := get_date_from_sqlite(row["created_on"]):

Codecov warning: added line #L618 not covered by tests
label += "_ON"
# As date_time is in UTC
offset = tzlocal().utcoffset(datetime)
if offset:
if offset := tzlocal().utcoffset(datetime):

Codecov warning: added line #L620 not covered by tests
datetime += offset
args.append(Translator.format_datetime(datetime))
return self.tr(label, values=args)
elif role == self.COMPLETED_ON:
label = "COMPLETED" if row["status"].name == "DONE" else "CANCELLED"
args = []
datetime = get_date_from_sqlite(row["completed_on"])
if datetime:
if datetime := get_date_from_sqlite(row["completed_on"]):

Codecov warning: added line #L627 not covered by tests
label += "_ON"
offset = tzlocal().utcoffset(datetime)
if offset:
if offset := tzlocal().utcoffset(datetime):

Codecov warning: added line #L629 not covered by tests
datetime += offset
args.append(Translator.format_datetime(datetime))
return self.tr(label, values=args)
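A standalone sketch of the view.py patterns: a lookup that may return None collapses to a single conditional return, and optional datetimes and UTC offsets are handled with walrus ifs. Only the standard library is used; get_date_from_sqlite and the tzlocal() offset are mimicked with hypothetical helpers, not the real nxdrive/Qt code.

from datetime import datetime, timedelta
from typing import Optional

engines = {"uid-1": {"name": "Personal space"}}

def engine_name(uid: str) -> str:
    engine = engines.get(uid)
    # Conditional expression instead of an early "return ''".
    return "" if not engine else engine["name"]

def get_date_from_sqlite(value: Optional[str]) -> Optional[datetime]:
    """Placeholder for nxdrive's helper: parse an ISO string, or None for a NULL column."""
    return datetime.fromisoformat(value) if value else None

def local_offset(dt: datetime) -> Optional[timedelta]:
    """Placeholder for tzlocal().utcoffset(dt): the local UTC offset (may be zero)."""
    return dt.astimezone().utcoffset()

def started_label(created_on: Optional[str]) -> str:
    label, args = "STARTED", []
    if dt := get_date_from_sqlite(created_on):  # skip the whole branch when the column was NULL
        label += "_ON"
        if offset := local_offset(dt):          # stored value is UTC; shift it to local time
            dt += offset
        args.append(dt.isoformat(sep=" "))
    return f"{label} {args}"

print(engine_name("uid-1"))                  # Personal space
print(engine_name("missing"))                # (empty string)
print(started_label("2023-10-25 12:00:00"))  # STARTED_ON [...]
print(started_label(None))                   # STARTED []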
3 changes: 1 addition & 2 deletions tests/unit/test_client_uploader.py
@@ -13,8 +13,7 @@
def baseuploader():
remote = Remote
remote.dao = Mock()
baseuploader = BaseUploader(remote)
return baseuploader
return BaseUploader(remote)


def test_link_blob_to_doc(baseuploader, upload, tmp_path, monkeypatch):
2 changes: 1 addition & 1 deletion tests/unit/test_engine_dao.py
@@ -409,7 +409,7 @@ def test_migration_db_v10(engine_dao):
"""Verify Downloads after migration from v9 to v10."""
with engine_dao("engine_migration_10.db") as dao:
downloads = list(dao.get_downloads())
assert len(downloads) == 0
assert not downloads

states = list(dao.get_states_from_partial_local(Path()))
assert len(states) == 4
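Finally, a sketch of the two test-suite tweaks: a pytest fixture returning the constructed object directly instead of binding it to a temporary name, and a truthiness assert replacing the len(...) == 0 comparison. The class and fixture below are illustrative only.

import pytest

class BaseUploader:
    def __init__(self, remote) -> None:
        self.remote = remote

@pytest.fixture
def baseuploader():
    # Return the object directly; no intermediate variable needed.
    return BaseUploader(remote="fake-remote")

def test_uploader(baseuploader):
    assert baseuploader.remote == "fake-remote"

def test_no_downloads():
    downloads = []
    # Truthiness check reads better than len(downloads) == 0.
    assert not downloads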
