From b90806ea1f904395713217429508b69eb31cda23 Mon Sep 17 00:00:00 2001 From: Anindya Roy Date: Thu, 14 Sep 2023 10:52:48 +0530 Subject: [PATCH 01/24] 14-sept --- nxdrive/client/uploader/__init__.py | 12 +++++++++++ nxdrive/client/uploader/direct_transfer.py | 6 ++++++ nxdrive/dao/engine.py | 6 ++++++ nxdrive/engine/activity.py | 1 + nxdrive/engine/processor.py | 19 +++++++++++----- nxdrive/engine/queue_manager.py | 2 ++ nxdrive/engine/workers.py | 4 ++++ nxdrive/gui/view.py | 25 ++++++++++++++++++++++ 8 files changed, 70 insertions(+), 5 deletions(-) diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 812fa719e7..9408282e34 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -427,6 +427,8 @@ def link_blob_to_doc( # type: ignore[return] ) -> Dict[str, Any]: """Link the given uploaded *blob* to the given document.""" + log.info("^^^^^^^^^^^^^^^^^ link_blob_to_doc ") + headers = {"Nuxeo-Transaction-Timeout": str(TX_TIMEOUT)} if transfer.request_uid: headers[IDEMPOTENCY_KEY] = transfer.request_uid @@ -444,17 +446,27 @@ def link_blob_to_doc( # type: ignore[return] engine=transfer.engine, doc_pair=transfer.doc_pair, ) + log.info(f"^^^^^^^^^^^^^^^^^ link_blob_to_doc action: {action!r}") action.is_direct_transfer = transfer.is_direct_transfer if "headers" in kwargs: kwargs["headers"].update(headers) else: kwargs["headers"] = headers try: + doc_type = kwargs.get("doc_type", "") if transfer.is_direct_transfer and doc_type and doc_type != "": + log.info( + f"&&&&&&1111 _transfer_docType_file transfer: {transfer!r}, \ + headers: {headers!r}, doc_type: {doc_type!r}" + ) res = self._transfer_docType_file(transfer, headers, doc_type) else: + log.info( + f"&&&&&&222 _transfer_autoType_file2 command: {command!r}, blob: {blob!r}, kwargs: {kwargs!r}" + ) res = self._transfer_autoType_file(command, blob, kwargs) + log.info(f"^^^^^^^^^^^^^^^^^ link_blob_to_doc res: {res!r}") return res except Exception as exc: diff --git a/nxdrive/client/uploader/direct_transfer.py b/nxdrive/client/uploader/direct_transfer.py index 95143edfe5..0cd712a3e7 100644 --- a/nxdrive/client/uploader/direct_transfer.py +++ b/nxdrive/client/uploader/direct_transfer.py @@ -79,6 +79,7 @@ def upload( if doc_pair.folderish: if not doc_pair.doc_type: + log.info("------ not doc_pair.doc_type") item = self.remote.upload_folder( doc_pair.remote_parent_path, {"title": doc_pair.local_name}, @@ -86,6 +87,7 @@ def upload( ) else: try: + log.info("------ else") payload = { "entity-type": "document", "name": doc_pair.local_name, @@ -100,6 +102,7 @@ def upload( filepath = f"{doc_pair.remote_parent_path}/{doc_pair.local_name}" item = self.remote.fetch(filepath) except NotFound: + log.info("------ inside if--> else except") raise NotFound( f"Could not find {filepath!r} on {self.remote.client.host}" ) @@ -107,6 +110,7 @@ def upload( else: # Only replace the document if the user wants to overwrite = doc_pair.duplicate_behavior == "override" + log.info("----- else-->") # Upload the blob and use the FileManager importer to create the document item = super().upload_impl( @@ -132,5 +136,7 @@ def upload( ) }, ) + log.info("-----------calling FileManager.Import inside else") + log.info("-----------calling FileManager.Import") self.dao.save_session_item(doc_pair.session, item) return item diff --git a/nxdrive/dao/engine.py b/nxdrive/dao/engine.py index 38705b6795..db139ac31c 100644 --- a/nxdrive/dao/engine.py +++ b/nxdrive/dao/engine.py @@ -1501,6 +1501,7 @@ def 
queue_children(self, row: DocPair, /) -> None: def increase_error( self, row: DocPair, error: str, /, *, details: str = None, incr: int = 1 ) -> None: + log.info(f"increase_error DB -->> error: {error!r}") with self.lock: error_date = datetime.utcnow() c = self._get_write_connection().cursor() @@ -2549,6 +2550,11 @@ def set_transfer_status( f"UPDATE {table} SET status = ? WHERE uid = ?", (transfer.status.value, transfer.uid), ) + log.info( + f">>>>>>>>>>>>>>> updating table {table!r} \ + with transfer.status.value: {transfer.status.value!r} \ + and transfer.uid: {transfer.uid!r}" + ) self.directTransferUpdated.emit() def remove_transfer( diff --git a/nxdrive/engine/activity.py b/nxdrive/engine/activity.py index d43e459869..f268b935f7 100644 --- a/nxdrive/engine/activity.py +++ b/nxdrive/engine/activity.py @@ -55,6 +55,7 @@ def get_current_action(*, thread_id: int = None) -> Optional["Action"]: @staticmethod def finish_action() -> None: action = Action.actions.pop(current_thread_id(), None) + if action: action.finish() diff --git a/nxdrive/engine/processor.py b/nxdrive/engine/processor.py index 209cdc5fef..a90dd36d20 100644 --- a/nxdrive/engine/processor.py +++ b/nxdrive/engine/processor.py @@ -253,7 +253,7 @@ def _handle_doc_pair_sync(self, doc_pair: DocPair, sync_handler: Callable) -> No def _handle_doc_pair_dt(self, doc_pair: DocPair, sync_handler: Callable) -> None: """Actions to be done to handle a Direct Transfer item. Called by ._execute().""" - log.debug(f"Calling {sync_handler.__name__}()") + log.debug(f"...Calling {sync_handler.__name__}()") try: sync_handler(doc_pair) except NotFound: @@ -360,12 +360,14 @@ def _execute(self) -> None: log.info(f"{type(exc).__name__}, wait 1s and requeue") sleep(1) self.engine.queue_manager.push(doc_pair) - except CONNECTION_ERROR: + except CONNECTION_ERROR as exc: # TODO: # Add detection for server unavailability to stop all sync # instead of putting files in error log.debug("Connection issue", exc_info=True) - self._postpone_pair(doc_pair, "CONNECTION_ERROR") + # self._direct_transfer_cancel(doc_pair) + self.increase_error(doc_pair, "CONNECTION_ERROR", exception=exc) + # self._postpone_pair(doc_pair, "CONNECTION_ERROR") except MaxRetryError: log.warning("Connection retries issue", exc_info=True) self._postpone_pair(doc_pair, "MAX_RETRY_ERROR") @@ -379,12 +381,14 @@ def _execute(self) -> None: log.warning("Delaying conflicted document") self._postpone_pair(doc_pair, "Conflict") except HTTPError as exc: + log.info(f"HTTP ERROR: {exc!r}") if exc.status == 404: # We saw it happened once a migration is done. # Nuxeo kept the document reference but it does # not exist physically anywhere. 
log.info("The document does not exist anymore") - self.dao.remove_state(doc_pair) + # self.increase_error(doc_pair, "SERVER_ERROR", exception=exc) + # self.dao.remove_state(doc_pair) elif exc.status == 416: log.warning("Invalid downloaded temporary file") tmp_folder = ( @@ -393,8 +397,11 @@ def _execute(self) -> None: with suppress(FileNotFoundError): shutil.rmtree(tmp_folder) self._postpone_pair(doc_pair, "Requested Range Not Satisfiable") - elif exc.status in (405, 408, 500): + elif exc.status in (405, 408): self.increase_error(doc_pair, "SERVER_ERROR", exception=exc) + elif exc.status == 500: + log.info(f"Encountered error: {exc!r}") + self._direct_transfer_cancel(doc_pair) elif exc.status in (502, 503, 504): log.warning("Server is unavailable", exc_info=True) self._check_exists_on_the_server(doc_pair) @@ -402,6 +409,7 @@ def _execute(self) -> None: error = f"{handler_name}_http_error_{exc.status}" self._handle_pair_handler_exception(doc_pair, error, exc) except UploadError as exc: + log.info(f"Upload ERROR: {exc!r}") exc_info = True if "ExpiredToken" in exc.info: # It happens to non-chunked uploads, it is safe to restart the upload completely @@ -418,6 +426,7 @@ def _execute(self) -> None: ) self._postpone_pair(doc_pair, "Upload") except (DownloadPaused, UploadPaused) as exc: + log.info(f"DownloadPaused/ UploadPaused: {exc!r}") nature = "download" if isinstance(exc, DownloadPaused) else "upload" log.info(f"Pausing {nature} {exc.transfer_id!r}") self.engine.dao.set_transfer_doc( diff --git a/nxdrive/engine/queue_manager.py b/nxdrive/engine/queue_manager.py index bf5ae24511..71a69908af 100644 --- a/nxdrive/engine/queue_manager.py +++ b/nxdrive/engine/queue_manager.py @@ -215,6 +215,7 @@ def push(self, state: Union[DocPair, QueueItem], /) -> None: @pyqtSlot() def _on_error_timer(self) -> None: + log.info("++++++++ _on_error_timer") with self._error_lock: cur_time = int(time.time()) for doc_pair in self._on_error_queue.copy().values(): @@ -244,6 +245,7 @@ def get_error_threshold(self) -> int: def push_error( self, doc_pair: DocPair, /, *, exception: Exception = None, interval: int = None ) -> None: + log.info(f"--->>> push_error({doc_pair!r}, {exception!r})") error_count = doc_pair.error_count err_code = WINERROR_CODE_PROCESS_CANNOT_ACCESS_FILE emit_sig = doc_pair.id not in self._on_error_queue diff --git a/nxdrive/engine/workers.py b/nxdrive/engine/workers.py index 62d8cb8b30..f9d2393ca2 100644 --- a/nxdrive/engine/workers.py +++ b/nxdrive/engine/workers.py @@ -233,9 +233,13 @@ def giveup_error( def increase_error( self, doc_pair: DocPair, error: str, /, *, exception: Exception = None ) -> None: + log.info(f"increase_error -->> error: {error!r}") details = str(exception) if exception else None log.info(f"Increasing error [{error}] ({details}) for {doc_pair!r}") self.dao.increase_error(doc_pair, error, details=details) + log.info( + f"--->>> self.engine.queue_manager.push_error({doc_pair!r}, {exception!r})" + ) self.engine.queue_manager.push_error(doc_pair, exception=exception) def remove_void_transfers(self, doc_pair: DocPair, /) -> None: diff --git a/nxdrive/gui/view.py b/nxdrive/gui/view.py index 98882c34c8..39972fce38 100755 --- a/nxdrive/gui/view.py +++ b/nxdrive/gui/view.py @@ -1,4 +1,5 @@ from functools import partial +from logging import getLogger from typing import TYPE_CHECKING, Any, Callable, Dict, List, Tuple from dateutil.tz import tzlocal @@ -18,6 +19,8 @@ from ..translator import Translator from ..utils import force_decode, get_date_from_sqlite, sizeof_fmt +log = 
getLogger(__name__) + if TYPE_CHECKING: from .application import Application # noqa from ..engine.engine import Engine # noqa @@ -358,22 +361,44 @@ def data(self, index: QModelIndex, role: int, /) -> Any: def setData(self, index: QModelIndex, value: Any, /, *, role: int = None) -> None: if role is None: return + log.info( + f"************************* inside setData: index: {index!r}, value: {value!r}, role: {role!r}" + ) key = force_decode(self.roleNames()[role]) + log.info(f"************************* inside setData: key: {key!r}") self.items[index.row()][key] = value + log.info( + f"************************* inside setData: self.items: {self.items!r}" + ) + log.info( + f"************************* inside setData: self.dataChanged.emit({index!r}, {index!r}, {[role]!r})" + ) self.dataChanged.emit(index, index, [role]) @pyqtSlot(dict) def set_progress(self, action: Dict[str, Any], /) -> None: + log.info(f"-++++++++++++++++++++++++++++++++ {self.items}") for i, item in enumerate(self.items): + log.info(f"-------------------------- {i},-->>>> {item}") + log.info(f"-------------------------- {action}") if ( item["engine"] != action["engine"] or item["doc_pair"] != action["doc_pair"] ): + log.info("+++++++++++++++++++++++++++++++ continue") continue idx = self.createIndex(i, 0) self.setData(idx, action["progress"], role=self.PROGRESS) self.setData(idx, action["progress"], role=self.TRANSFERRED) if action["action_type"] == "Linking": + import datetime + + current_time = datetime.datetime.now() + log.info("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^") + log.info( + f"Finalizing the Upload. Last status received from server at {current_time}" + ) + log.info("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^") self.setData(idx, True, role=self.FINALIZING) def add_item(self, parent: QModelIndex, n_item: Dict[str, Any], /) -> None: From ea94e57389f4dcd3eeb462e438a0de58e683466a Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Fri, 22 Sep 2023 23:15:26 +0530 Subject: [PATCH 02/24] NXDRIVE-2711:Show thta upload is still alive for very large file --- nxdrive/client/uploader/__init__.py | 37 +++++++++++++++++++ nxdrive/client/uploader/direct_transfer.py | 2 +- nxdrive/dao/__init__.py | 1 + nxdrive/dao/engine.py | 8 +++- .../engine/0023_initial_migration.py | 34 +++++++++++++++++ nxdrive/dao/migrations/engine/__init__.py | 6 ++- nxdrive/data/i18n/i18n.json | 1 + nxdrive/data/qml/TransferItem.qml | 8 ++++ nxdrive/engine/activity.py | 28 +++++++++++++- nxdrive/gui/application.py | 1 + nxdrive/gui/view.py | 15 ++++++++ nxdrive/objects.py | 1 + 12 files changed, 137 insertions(+), 5 deletions(-) create mode 100644 nxdrive/dao/migrations/engine/0023_initial_migration.py diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 9408282e34..7417288e9a 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -144,6 +144,7 @@ def _get_transfer( remote_parent_path=kwargs.pop("remote_parent_path", ""), remote_parent_ref=kwargs.pop("remote_parent_ref", ""), doc_pair=kwargs.pop("doc_pair", None), + transfer_status="testing1", ) # Inject the request UID @@ -226,6 +227,7 @@ def upload_impl( else: try: self.upload_chunks(transfer, blob, chunked) + log.info(f"########### upload_chunk: {transfer}") finally: if blob.fd: blob.fd.close() @@ -342,10 +344,12 @@ def upload_chunks(self, transfer: Upload, blob: FileBlob, chunked: bool, /) -> N for _ in uploader.iter_upload(): # Ensure the batchId will not be purged while uploading the content last_ping = 
self._ping_batch_id(transfer, last_ping) + log.debug(f">>>>>>> upload_chunks, last_ping: {last_ping}") action.progress = action.chunk_size * len( uploader.blob.uploadedChunkIds ) + log.debug(f">>>>>> action.progress: {action.progress}") # Save the progression transfer.progress = action.get_percent() @@ -394,6 +398,7 @@ def upload_chunks(self, transfer: Upload, blob: FileBlob, chunked: bool, /) -> N log.warning(err) raise HTTPError(status=500, message=err) from exc finally: + log.debug(f">>>> action: finish action {action.finish_action}") action.finish_action() def _link_blob_to_doc( @@ -466,11 +471,43 @@ def link_blob_to_doc( # type: ignore[return] f"&&&&&&222 _transfer_autoType_file2 command: {command!r}, blob: {blob!r}, kwargs: {kwargs!r}" ) res = self._transfer_autoType_file(command, blob, kwargs) + + """link_progress = True + while link_progress: + # api call + #time.sleep(3) + res = self.remote.client.request( + "GET", + f"{self.remote.client.api_path}/upload/{transfer.batch_obj.batchId}/0", + headers=headers, + ssl_verify=self.verification_needed, + ) + if res.status_code == 202: + transfer.transfer_status = str(datetime.datetime.now()) # noqa + if res.status_code != 404: + t = f"Upload is in progress. Last updated time: {datetime.datetime.now()}" + print(f">>>>>> time from api: {t}") + action.transfer_status = t + continue + link_progress = False""" + # transfer.transfer_status = "Linking Done" + # condition for error case + # self.dao.update_upload(transfer) + + """for x in range(4): + time.sleep(3) + t = f"{datetime.datetime.now()}" + print(f">>>>>> time from api: {t}") + action.transfer_status = t""" + log.info(f"^^^^^^^^^^^^^^^^^ link_blob_to_doc res: {res!r}") return res except Exception as exc: err = f"Error while linking blob to doc: {exc!r}" + transfer.status = TransferStatus.CANCELLED + self.dao.set_transfer_status("uploads", transfer) + action.transfer_status = "Error" log.warning(err) finally: action.finish_action() diff --git a/nxdrive/client/uploader/direct_transfer.py b/nxdrive/client/uploader/direct_transfer.py index 0cd712a3e7..a6260b8545 100644 --- a/nxdrive/client/uploader/direct_transfer.py +++ b/nxdrive/client/uploader/direct_transfer.py @@ -110,7 +110,7 @@ def upload( else: # Only replace the document if the user wants to overwrite = doc_pair.duplicate_behavior == "override" - log.info("----- else-->") + log.debug("----- else-->") # Upload the blob and use the FileManager importer to create the document item = super().upload_impl( diff --git a/nxdrive/dao/__init__.py b/nxdrive/dao/__init__.py index b00f25f201..45757ff42d 100644 --- a/nxdrive/dao/__init__.py +++ b/nxdrive/dao/__init__.py @@ -13,4 +13,5 @@ versions_history = { "5.2.8": 21, "5.3.0": 22, + "5.3.3": 23, } diff --git a/nxdrive/dao/engine.py b/nxdrive/dao/engine.py index db139ac31c..95c678ced7 100644 --- a/nxdrive/dao/engine.py +++ b/nxdrive/dao/engine.py @@ -530,6 +530,9 @@ def _migrate_db_old(self, cursor: Cursor, version: int, /) -> None: if version < 22: self.store_int(SCHEMA_VERSION, 22) self.set_schema_version(cursor, 22) + if version < 23: + self.store_int(SCHEMA_VERSION, 23) + self.set_schema_version(cursor, 23) def _create_table( self, cursor: Cursor, name: str, /, *, force: bool = False @@ -2354,14 +2357,15 @@ def save_dt_upload(self, upload: Upload, /) -> None: upload.remote_parent_ref, upload.doc_pair, upload.request_uid, + upload.transfer_status, ) c = self._get_write_connection().cursor() sql = ( "INSERT INTO Uploads " "(path, status, engine, is_direct_edit, is_direct_transfer, filesize, 
batch, chunk_size," - " remote_parent_path, remote_parent_ref, doc_pair, request_uid)" + " remote_parent_path, remote_parent_ref, doc_pair, request_uid, transfer_status)" " VALUES (?, IFNULL((SELECT s.status FROM States st INNER JOIN Sessions s ON st.session = s.uid " - "AND st.id = ?), ?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + "AND st.id = ?), ?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" ) c.execute(sql, values) diff --git a/nxdrive/dao/migrations/engine/0023_initial_migration.py b/nxdrive/dao/migrations/engine/0023_initial_migration.py new file mode 100644 index 0000000000..667df8acd4 --- /dev/null +++ b/nxdrive/dao/migrations/engine/0023_initial_migration.py @@ -0,0 +1,34 @@ +from sqlite3 import Cursor + +from ..migration import MigrationInterface + + +class MigrationInitial(MigrationInterface): + def upgrade(self, cursor: Cursor) -> None: + """ + Update the Uploads table. + """ + self._update_uploads_table(cursor) + + def downgrade(self, cursor: Cursor) -> None: + """Update the Uploads table.""" + # Drop Column transfer_status from Uploads table + cursor.execute("ALTER TABLE Uploads DROP COLUMN transfer_status") + + @property + def version(self) -> int: + return 23 + + @property + def previous_version(self) -> int: + return 22 + + @staticmethod + def _update_uploads_table(cursor: Cursor) -> None: + """Update the Uploads table.""" + cursor.execute( + "ALTER TABLE Uploads ADD transfer_status VARCHAR DEFAULT ('testing')" + ) + + +migration = MigrationInitial() diff --git a/nxdrive/dao/migrations/engine/__init__.py b/nxdrive/dao/migrations/engine/__init__.py index ece242d0d2..4e86f03354 100644 --- a/nxdrive/dao/migrations/engine/__init__.py +++ b/nxdrive/dao/migrations/engine/__init__.py @@ -1,7 +1,11 @@ import importlib from typing import Any, Dict -__migrations_list = ["0021_initial_migration", "0022_initial_migration"] # Keep sorted +__migrations_list = [ + "0021_initial_migration", + "0022_initial_migration", + "0023_initial_migration", +] # Keep sorted def import_migrations() -> Dict[str, Any]: diff --git a/nxdrive/data/i18n/i18n.json b/nxdrive/data/i18n/i18n.json index 2962170574..25ab0c70d1 100644 --- a/nxdrive/data/i18n/i18n.json +++ b/nxdrive/data/i18n/i18n.json @@ -104,6 +104,7 @@ "DIRECT_TRANSFER_CANCEL": "Do you confirm the cancellation of the transfer of \"%1\"?", "DIRECT_TRANSFER_CANCEL_HEADER": "A transfer cancellation has been asked.", "DIRECT_TRANSFER_DETAILS": "[%1%] %2 of %3", + "DIRECT_TRANSFER_FINALIZING_DETAILS": "Upload is in progress. 
Last updated time: %1", "DIRECT_TRANSFER_END": "Transfer done: \"%1\"", "DIRECT_TRANSFER_ERROR": "Transfer error: \"%1\"", "DIRECT_TRANSFER_NO_ACCOUNT": "Cannot use the Direct Transfer feature with no account, aborting.", diff --git a/nxdrive/data/qml/TransferItem.qml b/nxdrive/data/qml/TransferItem.qml index eb030950c1..a6049ff124 100644 --- a/nxdrive/data/qml/TransferItem.qml +++ b/nxdrive/data/qml/TransferItem.qml @@ -77,5 +77,13 @@ Rectangle { } } } + + ScaledText { + text: qsTr("DIRECT_TRANSFER_FINALIZING_DETAILS").arg(transfer_status) + tl.tr + color: secondaryText + visible: finalizing && transfer_status + Layout.leftMargin: icon.width + 5 + font.pointSize: point_size * 0.8 + } } } diff --git a/nxdrive/engine/activity.py b/nxdrive/engine/activity.py index f268b935f7..99e03ecbba 100644 --- a/nxdrive/engine/activity.py +++ b/nxdrive/engine/activity.py @@ -19,11 +19,14 @@ class Action(QObject): actions: Dict[int, Optional["Action"]] = {} - def __init__(self, action_type: str, /, *, progress: float = 0.0) -> None: + def __init__( + self, action_type: str, /, *, progress: float = 0.0, transfer_status: str = "" + ) -> None: super().__init__() self.type = action_type self._progress = progress + self._transfer_status = transfer_status self.size = 0 self.uid = str(uuid.uuid4()) @@ -43,6 +46,14 @@ def progress(self, value: float, /) -> None: def get_percent(self) -> float: return self.progress + @property + def transfer_status(self) -> str: + return self._transfer_status + + @transfer_status.setter + def transfer_status(self, value: str, /) -> None: + self._transfer_status = value + @staticmethod def get_actions() -> Dict[int, Optional["Action"]]: return Action.actions.copy() @@ -67,6 +78,7 @@ def export(self) -> Dict[str, Any]: "uid": self.uid, "action_type": self.type, "progress": self.get_percent(), + "transfer_status": self.transfer_status, } def __repr__(self) -> str: @@ -123,6 +135,9 @@ def __init__( # Used to know if the file is a Direct Transfer item self.is_direct_transfer = False + """a = str(datetime.datetime.now()) + print(f">>>>>> a:: {a}") + self.transfer_status = a""" self._connect_reporter(reporter) self.started.emit(self) @@ -150,6 +165,15 @@ def progress(self, value: float, /) -> None: self.progressing.emit(self) + @property + def transfer_status(self) -> str: + return self._transfer_status + + @transfer_status.setter + def transfer_status(self, value: str, /) -> None: + self._transfer_status = value + self.progressing.emit(self) + def get_percent(self) -> float: if self.size < 0 or (self.empty and not self.uploaded): return 0.0 @@ -258,6 +282,8 @@ def __init__( doc_pair=doc_pair, ) self.progress = size + """import datetime + self.transfer_status = str(datetime.datetime.now())""" def tooltip(doing: str): # type: ignore diff --git a/nxdrive/gui/application.py b/nxdrive/gui/application.py index 609768aed5..6af2c290c0 100644 --- a/nxdrive/gui/application.py +++ b/nxdrive/gui/application.py @@ -1875,6 +1875,7 @@ def refresh_direct_transfer_items(self, dao: EngineDAO, /) -> None: for transfer in transfers: if transfer["doc_pair"] in pair_finalizing: transfer["finalizing"] = True + log.debug(f">>>>>>>> finalizing: True for {transfers}") self.direct_transfer_model.update_items(transfers) @pyqtSlot(object) diff --git a/nxdrive/gui/view.py b/nxdrive/gui/view.py index 39972fce38..7679c84824 100755 --- a/nxdrive/gui/view.py +++ b/nxdrive/gui/view.py @@ -155,6 +155,7 @@ class TransferModel(QAbstractListModel): IS_DIRECT_EDIT = qt.UserRole + 7 FINALIZING = qt.UserRole + 8 
PROGRESS_METRICS = qt.UserRole + 9 + # FINALIZING_MSG = qt.UserRole + 10 def __init__(self, translate: Callable, /, *, parent: QObject = None) -> None: super().__init__(parent) @@ -172,6 +173,7 @@ def __init__(self, translate: Callable, /, *, parent: QObject = None) -> None: # The is the Verification step for downloads # and Linking step for uploads. self.FINALIZING: b"finalizing", + # self.FINALIZING_MSG: b"test_val", } def rowCount(self, parent: QModelIndex = QModelIndex(), /) -> int: @@ -230,6 +232,9 @@ def data(self, index: QModelIndex, role: int, /) -> Any: return row.get("finalizing", False) if role == self.PROGRESS_METRICS: return self.get_progress(row) + """if role == self.FINALIZING_MSG: + if row.get("finalizing", False) == True: + return str(datetime.datetime.now())""" return row[self.names[role].decode()] def setData(self, index: QModelIndex, value: Any, /, *, role: int = None) -> None: @@ -254,8 +259,11 @@ def set_progress(self, action: Dict[str, Any], /) -> None: self.setData(idx, action["progress"], role=self.PROGRESS) self.setData(idx, action["progress"], role=self.PROGRESS_METRICS) + log.info(f'>>>>>>> idx {idx}, {action["progress"]}, {self.PROGRESS}') + log.debug(f'>>>>>>> idx {idx}, {action["progress"]}, {self.PROGRESS}') if action["action_type"] in ("Linking", "Verification"): self.setData(idx, True, role=self.FINALIZING) + # self.setData(idx, True, role=self.FINALIZING_MSG) def flags(self, index: QModelIndex, /) -> Qt.ItemFlags: return qt.ItemIsEditable | qt.ItemIsEnabled | qt.ItemIsSelectable @@ -276,6 +284,7 @@ class DirectTransferModel(QAbstractListModel): REMOTE_PARENT_REF = qt.UserRole + 10 SHADOW = qt.UserRole + 11 # Tell the interface if the row should be visible or not DOC_PAIR = qt.UserRole + 12 + FINALIZING_MSG = qt.UserRole + 13 def __init__(self, translate: Callable, /, *, parent: QObject = None) -> None: super().__init__(parent) @@ -294,6 +303,7 @@ def __init__(self, translate: Callable, /, *, parent: QObject = None) -> None: self.REMOTE_PARENT_REF: b"remote_parent_ref", self.SHADOW: b"shadow", self.DOC_PAIR: b"doc_pair", + self.FINALIZING_MSG: b"transfer_status", } # Pretty print self.psize = partial(sizeof_fmt, suffix=self.tr("BYTE_ABBREV")) @@ -356,6 +366,10 @@ def data(self, index: QModelIndex, role: int, /) -> Any: return self.psize(row["filesize"]) if role == self.TRANSFERRED: return self.psize(row["filesize"] * row["progress"] / 100) + if role == self.FINALIZING_MSG: + a = row.get("transfer_status") + log.debug(f">>>>>>>> time: {a}") + return a return row[self.names[role].decode()] def setData(self, index: QModelIndex, value: Any, /, *, role: int = None) -> None: @@ -400,6 +414,7 @@ def set_progress(self, action: Dict[str, Any], /) -> None: ) log.info("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^") self.setData(idx, True, role=self.FINALIZING) + self.setData(idx, action["transfer_status"], role=self.FINALIZING_MSG) def add_item(self, parent: QModelIndex, n_item: Dict[str, Any], /) -> None: """Add an item to existing list.""" diff --git a/nxdrive/objects.py b/nxdrive/objects.py index da39956606..5c1b7662b8 100644 --- a/nxdrive/objects.py +++ b/nxdrive/objects.py @@ -488,6 +488,7 @@ class Upload(Transfer): batch_obj: Batch = None request_uid: Optional[str] = None is_dirty: bool = field(init=False, default=False) + transfer_status: str = "" def token_callback(self, batch: Batch, _: Dict[str, Any]) -> None: """Callback triggered when token is refreshed.""" From 273e923db1f0e4789a6e68b57e6ed1aed5dd1760 Mon Sep 17 00:00:00 2001 From: swetayadav1 
Date: Fri, 22 Sep 2023 23:49:16 +0530 Subject: [PATCH 03/24] NXDRIVE-2711:Show thta upload is still alive for very large file --- nxdrive/client/uploader/__init__.py | 7 ++-- nxdrive/dao/__init__.py | 1 - nxdrive/dao/engine.py | 8 ++--- .../engine/0023_initial_migration.py | 34 ------------------- nxdrive/dao/migrations/engine/__init__.py | 6 +--- nxdrive/objects.py | 1 - 6 files changed, 7 insertions(+), 50 deletions(-) delete mode 100644 nxdrive/dao/migrations/engine/0023_initial_migration.py diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 7417288e9a..4985b57983 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -1,7 +1,9 @@ """ Uploader used by the Remote client for all upload stuff. """ +import datetime import json +import time from abc import abstractmethod from logging import getLogger from pathlib import Path @@ -144,7 +146,6 @@ def _get_transfer( remote_parent_path=kwargs.pop("remote_parent_path", ""), remote_parent_ref=kwargs.pop("remote_parent_ref", ""), doc_pair=kwargs.pop("doc_pair", None), - transfer_status="testing1", ) # Inject the request UID @@ -494,11 +495,11 @@ def link_blob_to_doc( # type: ignore[return] # condition for error case # self.dao.update_upload(transfer) - """for x in range(4): + for x in range(4): time.sleep(3) t = f"{datetime.datetime.now()}" print(f">>>>>> time from api: {t}") - action.transfer_status = t""" + action.transfer_status = t log.info(f"^^^^^^^^^^^^^^^^^ link_blob_to_doc res: {res!r}") diff --git a/nxdrive/dao/__init__.py b/nxdrive/dao/__init__.py index 45757ff42d..b00f25f201 100644 --- a/nxdrive/dao/__init__.py +++ b/nxdrive/dao/__init__.py @@ -13,5 +13,4 @@ versions_history = { "5.2.8": 21, "5.3.0": 22, - "5.3.3": 23, } diff --git a/nxdrive/dao/engine.py b/nxdrive/dao/engine.py index 95c678ced7..db139ac31c 100644 --- a/nxdrive/dao/engine.py +++ b/nxdrive/dao/engine.py @@ -530,9 +530,6 @@ def _migrate_db_old(self, cursor: Cursor, version: int, /) -> None: if version < 22: self.store_int(SCHEMA_VERSION, 22) self.set_schema_version(cursor, 22) - if version < 23: - self.store_int(SCHEMA_VERSION, 23) - self.set_schema_version(cursor, 23) def _create_table( self, cursor: Cursor, name: str, /, *, force: bool = False @@ -2357,15 +2354,14 @@ def save_dt_upload(self, upload: Upload, /) -> None: upload.remote_parent_ref, upload.doc_pair, upload.request_uid, - upload.transfer_status, ) c = self._get_write_connection().cursor() sql = ( "INSERT INTO Uploads " "(path, status, engine, is_direct_edit, is_direct_transfer, filesize, batch, chunk_size," - " remote_parent_path, remote_parent_ref, doc_pair, request_uid, transfer_status)" + " remote_parent_path, remote_parent_ref, doc_pair, request_uid)" " VALUES (?, IFNULL((SELECT s.status FROM States st INNER JOIN Sessions s ON st.session = s.uid " - "AND st.id = ?), ?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + "AND st.id = ?), ?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" ) c.execute(sql, values) diff --git a/nxdrive/dao/migrations/engine/0023_initial_migration.py b/nxdrive/dao/migrations/engine/0023_initial_migration.py deleted file mode 100644 index 667df8acd4..0000000000 --- a/nxdrive/dao/migrations/engine/0023_initial_migration.py +++ /dev/null @@ -1,34 +0,0 @@ -from sqlite3 import Cursor - -from ..migration import MigrationInterface - - -class MigrationInitial(MigrationInterface): - def upgrade(self, cursor: Cursor) -> None: - """ - Update the Uploads table. 
- """ - self._update_uploads_table(cursor) - - def downgrade(self, cursor: Cursor) -> None: - """Update the Uploads table.""" - # Drop Column transfer_status from Uploads table - cursor.execute("ALTER TABLE Uploads DROP COLUMN transfer_status") - - @property - def version(self) -> int: - return 23 - - @property - def previous_version(self) -> int: - return 22 - - @staticmethod - def _update_uploads_table(cursor: Cursor) -> None: - """Update the Uploads table.""" - cursor.execute( - "ALTER TABLE Uploads ADD transfer_status VARCHAR DEFAULT ('testing')" - ) - - -migration = MigrationInitial() diff --git a/nxdrive/dao/migrations/engine/__init__.py b/nxdrive/dao/migrations/engine/__init__.py index 4e86f03354..ece242d0d2 100644 --- a/nxdrive/dao/migrations/engine/__init__.py +++ b/nxdrive/dao/migrations/engine/__init__.py @@ -1,11 +1,7 @@ import importlib from typing import Any, Dict -__migrations_list = [ - "0021_initial_migration", - "0022_initial_migration", - "0023_initial_migration", -] # Keep sorted +__migrations_list = ["0021_initial_migration", "0022_initial_migration"] # Keep sorted def import_migrations() -> Dict[str, Any]: diff --git a/nxdrive/objects.py b/nxdrive/objects.py index 5c1b7662b8..da39956606 100644 --- a/nxdrive/objects.py +++ b/nxdrive/objects.py @@ -488,7 +488,6 @@ class Upload(Transfer): batch_obj: Batch = None request_uid: Optional[str] = None is_dirty: bool = field(init=False, default=False) - transfer_status: str = "" def token_callback(self, batch: Batch, _: Dict[str, Any]) -> None: """Callback triggered when token is refreshed.""" From 915f44d27e16e62964e26b9135a2c8460037a902 Mon Sep 17 00:00:00 2001 From: Sweta Yadav Date: Tue, 26 Sep 2023 12:15:17 +0530 Subject: [PATCH 04/24] NXDRIVE-2711: Show that upload is still alive for very large files --- nxdrive/client/uploader/__init__.py | 33 +++++++++++++++-------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 4985b57983..6272e98a19 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -473,41 +473,42 @@ def link_blob_to_doc( # type: ignore[return] ) res = self._transfer_autoType_file(command, blob, kwargs) - """link_progress = True + link_progress = True while link_progress: # api call - #time.sleep(3) + time.sleep(5) res = self.remote.client.request( - "GET", - f"{self.remote.client.api_path}/upload/{transfer.batch_obj.batchId}/0", - headers=headers, - ssl_verify=self.verification_needed, - ) - if res.status_code == 202: - transfer.transfer_status = str(datetime.datetime.now()) # noqa + "GET", + f"{self.remote.client.api_path}/upload/{transfer.batch_obj.batchId}/0", + headers=headers, + ssl_verify=self.verification_needed, + ) + # if res.status_code == 202: + # transfer.transfer_status = str(datetime.datetime.now()) # noqa + print(f">>>> status code: {res.status_code}") if res.status_code != 404: - t = f"Upload is in progress. 
Last updated time: {datetime.datetime.now()}" + t = f"{datetime.datetime.now()}" print(f">>>>>> time from api: {t}") - action.transfer_status = t + action.transfer_status = str(t) continue - link_progress = False""" + link_progress = False # transfer.transfer_status = "Linking Done" # condition for error case # self.dao.update_upload(transfer) - for x in range(4): + """for x in range(4): time.sleep(3) t = f"{datetime.datetime.now()}" print(f">>>>>> time from api: {t}") - action.transfer_status = t + action.transfer_status = t""" log.info(f"^^^^^^^^^^^^^^^^^ link_blob_to_doc res: {res!r}") return res except Exception as exc: err = f"Error while linking blob to doc: {exc!r}" - transfer.status = TransferStatus.CANCELLED - self.dao.set_transfer_status("uploads", transfer) + transfer.status = TransferStatus.SUSPENDED + self.dao.set_transfer_status("upload", transfer) action.transfer_status = "Error" log.warning(err) finally: From d7308ca51cafb203839e975ec59e98a7969fbadc Mon Sep 17 00:00:00 2001 From: Sweta Yadav Date: Thu, 5 Oct 2023 12:22:34 +0530 Subject: [PATCH 05/24] NXDRIVE-2711: Upload is still alive --- nxdrive/client/uploader/__init__.py | 58 ++++------------------ nxdrive/client/uploader/direct_transfer.py | 6 --- nxdrive/dao/engine.py | 13 ++--- nxdrive/engine/activity.py | 6 --- nxdrive/engine/processor.py | 21 +++----- nxdrive/engine/queue_manager.py | 2 - nxdrive/engine/workers.py | 4 -- nxdrive/gui/application.py | 1 - nxdrive/gui/view.py | 31 ------------ 9 files changed, 23 insertions(+), 119 deletions(-) diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 6272e98a19..0e574bb74a 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -1,9 +1,7 @@ """ Uploader used by the Remote client for all upload stuff. 
""" -import datetime import json -import time from abc import abstractmethod from logging import getLogger from pathlib import Path @@ -228,7 +226,6 @@ def upload_impl( else: try: self.upload_chunks(transfer, blob, chunked) - log.info(f"########### upload_chunk: {transfer}") finally: if blob.fd: blob.fd.close() @@ -345,12 +342,10 @@ def upload_chunks(self, transfer: Upload, blob: FileBlob, chunked: bool, /) -> N for _ in uploader.iter_upload(): # Ensure the batchId will not be purged while uploading the content last_ping = self._ping_batch_id(transfer, last_ping) - log.debug(f">>>>>>> upload_chunks, last_ping: {last_ping}") action.progress = action.chunk_size * len( uploader.blob.uploadedChunkIds ) - log.debug(f">>>>>> action.progress: {action.progress}") # Save the progression transfer.progress = action.get_percent() @@ -399,7 +394,6 @@ def upload_chunks(self, transfer: Upload, blob: FileBlob, chunked: bool, /) -> N log.warning(err) raise HTTPError(status=500, message=err) from exc finally: - log.debug(f">>>> action: finish action {action.finish_action}") action.finish_action() def _link_blob_to_doc( @@ -433,8 +427,6 @@ def link_blob_to_doc( # type: ignore[return] ) -> Dict[str, Any]: """Link the given uploaded *blob* to the given document.""" - log.info("^^^^^^^^^^^^^^^^^ link_blob_to_doc ") - headers = {"Nuxeo-Transaction-Timeout": str(TX_TIMEOUT)} if transfer.request_uid: headers[IDEMPOTENCY_KEY] = transfer.request_uid @@ -452,7 +444,7 @@ def link_blob_to_doc( # type: ignore[return] engine=transfer.engine, doc_pair=transfer.doc_pair, ) - log.info(f"^^^^^^^^^^^^^^^^^ link_blob_to_doc action: {action!r}") + action.transfer_status = "" action.is_direct_transfer = transfer.is_direct_transfer if "headers" in kwargs: kwargs["headers"].update(headers) @@ -462,55 +454,23 @@ def link_blob_to_doc( # type: ignore[return] doc_type = kwargs.get("doc_type", "") if transfer.is_direct_transfer and doc_type and doc_type != "": - log.info( - f"&&&&&&1111 _transfer_docType_file transfer: {transfer!r}, \ - headers: {headers!r}, doc_type: {doc_type!r}" - ) res = self._transfer_docType_file(transfer, headers, doc_type) else: - log.info( - f"&&&&&&222 _transfer_autoType_file2 command: {command!r}, blob: {blob!r}, kwargs: {kwargs!r}" - ) res = self._transfer_autoType_file(command, blob, kwargs) - link_progress = True - while link_progress: - # api call - time.sleep(5) - res = self.remote.client.request( - "GET", - f"{self.remote.client.api_path}/upload/{transfer.batch_obj.batchId}/0", - headers=headers, - ssl_verify=self.verification_needed, - ) - # if res.status_code == 202: - # transfer.transfer_status = str(datetime.datetime.now()) # noqa - print(f">>>> status code: {res.status_code}") - if res.status_code != 404: - t = f"{datetime.datetime.now()}" - print(f">>>>>> time from api: {t}") - action.transfer_status = str(t) - continue - link_progress = False - # transfer.transfer_status = "Linking Done" - # condition for error case - # self.dao.update_upload(transfer) - - """for x in range(4): - time.sleep(3) - t = f"{datetime.datetime.now()}" - print(f">>>>>> time from api: {t}") - action.transfer_status = t""" - - log.info(f"^^^^^^^^^^^^^^^^^ link_blob_to_doc res: {res!r}") - return res except Exception as exc: err = f"Error while linking blob to doc: {exc!r}" - transfer.status = TransferStatus.SUSPENDED - self.dao.set_transfer_status("upload", transfer) + # self._set_transfer_status(transfer, TransferStatus.SUSPENDED) + # log.debug(f">>>> error start: {transfer}") + # self.dao.set_transfer_status("upload", 
transfer) action.transfer_status = "Error" + transfer.request_uid = str(uuid4()) + self.dao.update_upload_requestid(transfer) + log.debug(f">>>> error start1: {transfer}") log.warning(err) + + raise exc finally: action.finish_action() diff --git a/nxdrive/client/uploader/direct_transfer.py b/nxdrive/client/uploader/direct_transfer.py index a6260b8545..95143edfe5 100644 --- a/nxdrive/client/uploader/direct_transfer.py +++ b/nxdrive/client/uploader/direct_transfer.py @@ -79,7 +79,6 @@ def upload( if doc_pair.folderish: if not doc_pair.doc_type: - log.info("------ not doc_pair.doc_type") item = self.remote.upload_folder( doc_pair.remote_parent_path, {"title": doc_pair.local_name}, @@ -87,7 +86,6 @@ def upload( ) else: try: - log.info("------ else") payload = { "entity-type": "document", "name": doc_pair.local_name, @@ -102,7 +100,6 @@ def upload( filepath = f"{doc_pair.remote_parent_path}/{doc_pair.local_name}" item = self.remote.fetch(filepath) except NotFound: - log.info("------ inside if--> else except") raise NotFound( f"Could not find {filepath!r} on {self.remote.client.host}" ) @@ -110,7 +107,6 @@ def upload( else: # Only replace the document if the user wants to overwrite = doc_pair.duplicate_behavior == "override" - log.debug("----- else-->") # Upload the blob and use the FileManager importer to create the document item = super().upload_impl( @@ -136,7 +132,5 @@ def upload( ) }, ) - log.info("-----------calling FileManager.Import inside else") - log.info("-----------calling FileManager.Import") self.dao.save_session_item(doc_pair.session, item) return item diff --git a/nxdrive/dao/engine.py b/nxdrive/dao/engine.py index db139ac31c..b584332366 100644 --- a/nxdrive/dao/engine.py +++ b/nxdrive/dao/engine.py @@ -1501,7 +1501,6 @@ def queue_children(self, row: DocPair, /) -> None: def increase_error( self, row: DocPair, error: str, /, *, details: str = None, incr: int = 1 ) -> None: - log.info(f"increase_error DB -->> error: {error!r}") with self.lock: error_date = datetime.utcnow() c = self._get_write_connection().cursor() @@ -2382,6 +2381,13 @@ def update_upload(self, upload: Upload, /) -> None: sql = "UPDATE Uploads SET batch = ? WHERE uid = ?" c.execute(sql, (json.dumps(batch), upload.uid)) + def update_upload_requestid(self, upload: Upload, /) -> None: + """Update a upload.""" + + c = self._get_write_connection().cursor() + sql = "UPDATE Uploads SET request_uid = ? WHERE uid = ?" + c.execute(sql, (upload.request_uid, upload.uid)) + def pause_transfer( self, nature: str, @@ -2550,11 +2556,6 @@ def set_transfer_status( f"UPDATE {table} SET status = ? 
WHERE uid = ?", (transfer.status.value, transfer.uid), ) - log.info( - f">>>>>>>>>>>>>>> updating table {table!r} \ - with transfer.status.value: {transfer.status.value!r} \ - and transfer.uid: {transfer.uid!r}" - ) self.directTransferUpdated.emit() def remove_transfer( diff --git a/nxdrive/engine/activity.py b/nxdrive/engine/activity.py index 99e03ecbba..8956486c17 100644 --- a/nxdrive/engine/activity.py +++ b/nxdrive/engine/activity.py @@ -66,7 +66,6 @@ def get_current_action(*, thread_id: int = None) -> Optional["Action"]: @staticmethod def finish_action() -> None: action = Action.actions.pop(current_thread_id(), None) - if action: action.finish() @@ -135,9 +134,6 @@ def __init__( # Used to know if the file is a Direct Transfer item self.is_direct_transfer = False - """a = str(datetime.datetime.now()) - print(f">>>>>> a:: {a}") - self.transfer_status = a""" self._connect_reporter(reporter) self.started.emit(self) @@ -282,8 +278,6 @@ def __init__( doc_pair=doc_pair, ) self.progress = size - """import datetime - self.transfer_status = str(datetime.datetime.now())""" def tooltip(doing: str): # type: ignore diff --git a/nxdrive/engine/processor.py b/nxdrive/engine/processor.py index a90dd36d20..54a0581af7 100644 --- a/nxdrive/engine/processor.py +++ b/nxdrive/engine/processor.py @@ -253,7 +253,7 @@ def _handle_doc_pair_sync(self, doc_pair: DocPair, sync_handler: Callable) -> No def _handle_doc_pair_dt(self, doc_pair: DocPair, sync_handler: Callable) -> None: """Actions to be done to handle a Direct Transfer item. Called by ._execute().""" - log.debug(f"...Calling {sync_handler.__name__}()") + log.debug(f"Calling {sync_handler.__name__}()") try: sync_handler(doc_pair) except NotFound: @@ -360,14 +360,14 @@ def _execute(self) -> None: log.info(f"{type(exc).__name__}, wait 1s and requeue") sleep(1) self.engine.queue_manager.push(doc_pair) - except CONNECTION_ERROR as exc: + except CONNECTION_ERROR: # TODO: # Add detection for server unavailability to stop all sync # instead of putting files in error log.debug("Connection issue", exc_info=True) - # self._direct_transfer_cancel(doc_pair) - self.increase_error(doc_pair, "CONNECTION_ERROR", exception=exc) - # self._postpone_pair(doc_pair, "CONNECTION_ERROR") + # self.increase_error(doc_pair, "CONNECTION_ERROR") + self._postpone_pair(doc_pair, "CONNECTION_ERROR") + # self.increase_error(doc_pair, "CONNECTION_ERROR", exception=exc) except MaxRetryError: log.warning("Connection retries issue", exc_info=True) self._postpone_pair(doc_pair, "MAX_RETRY_ERROR") @@ -381,14 +381,12 @@ def _execute(self) -> None: log.warning("Delaying conflicted document") self._postpone_pair(doc_pair, "Conflict") except HTTPError as exc: - log.info(f"HTTP ERROR: {exc!r}") if exc.status == 404: # We saw it happened once a migration is done. # Nuxeo kept the document reference but it does # not exist physically anywhere. 
log.info("The document does not exist anymore") - # self.increase_error(doc_pair, "SERVER_ERROR", exception=exc) - # self.dao.remove_state(doc_pair) + self.dao.remove_state(doc_pair) elif exc.status == 416: log.warning("Invalid downloaded temporary file") tmp_folder = ( @@ -397,11 +395,8 @@ def _execute(self) -> None: with suppress(FileNotFoundError): shutil.rmtree(tmp_folder) self._postpone_pair(doc_pair, "Requested Range Not Satisfiable") - elif exc.status in (405, 408): + elif exc.status in (405, 408, 500): self.increase_error(doc_pair, "SERVER_ERROR", exception=exc) - elif exc.status == 500: - log.info(f"Encountered error: {exc!r}") - self._direct_transfer_cancel(doc_pair) elif exc.status in (502, 503, 504): log.warning("Server is unavailable", exc_info=True) self._check_exists_on_the_server(doc_pair) @@ -409,7 +404,6 @@ def _execute(self) -> None: error = f"{handler_name}_http_error_{exc.status}" self._handle_pair_handler_exception(doc_pair, error, exc) except UploadError as exc: - log.info(f"Upload ERROR: {exc!r}") exc_info = True if "ExpiredToken" in exc.info: # It happens to non-chunked uploads, it is safe to restart the upload completely @@ -426,7 +420,6 @@ def _execute(self) -> None: ) self._postpone_pair(doc_pair, "Upload") except (DownloadPaused, UploadPaused) as exc: - log.info(f"DownloadPaused/ UploadPaused: {exc!r}") nature = "download" if isinstance(exc, DownloadPaused) else "upload" log.info(f"Pausing {nature} {exc.transfer_id!r}") self.engine.dao.set_transfer_doc( diff --git a/nxdrive/engine/queue_manager.py b/nxdrive/engine/queue_manager.py index 71a69908af..bf5ae24511 100644 --- a/nxdrive/engine/queue_manager.py +++ b/nxdrive/engine/queue_manager.py @@ -215,7 +215,6 @@ def push(self, state: Union[DocPair, QueueItem], /) -> None: @pyqtSlot() def _on_error_timer(self) -> None: - log.info("++++++++ _on_error_timer") with self._error_lock: cur_time = int(time.time()) for doc_pair in self._on_error_queue.copy().values(): @@ -245,7 +244,6 @@ def get_error_threshold(self) -> int: def push_error( self, doc_pair: DocPair, /, *, exception: Exception = None, interval: int = None ) -> None: - log.info(f"--->>> push_error({doc_pair!r}, {exception!r})") error_count = doc_pair.error_count err_code = WINERROR_CODE_PROCESS_CANNOT_ACCESS_FILE emit_sig = doc_pair.id not in self._on_error_queue diff --git a/nxdrive/engine/workers.py b/nxdrive/engine/workers.py index f9d2393ca2..62d8cb8b30 100644 --- a/nxdrive/engine/workers.py +++ b/nxdrive/engine/workers.py @@ -233,13 +233,9 @@ def giveup_error( def increase_error( self, doc_pair: DocPair, error: str, /, *, exception: Exception = None ) -> None: - log.info(f"increase_error -->> error: {error!r}") details = str(exception) if exception else None log.info(f"Increasing error [{error}] ({details}) for {doc_pair!r}") self.dao.increase_error(doc_pair, error, details=details) - log.info( - f"--->>> self.engine.queue_manager.push_error({doc_pair!r}, {exception!r})" - ) self.engine.queue_manager.push_error(doc_pair, exception=exception) def remove_void_transfers(self, doc_pair: DocPair, /) -> None: diff --git a/nxdrive/gui/application.py b/nxdrive/gui/application.py index 6af2c290c0..609768aed5 100644 --- a/nxdrive/gui/application.py +++ b/nxdrive/gui/application.py @@ -1875,7 +1875,6 @@ def refresh_direct_transfer_items(self, dao: EngineDAO, /) -> None: for transfer in transfers: if transfer["doc_pair"] in pair_finalizing: transfer["finalizing"] = True - log.debug(f">>>>>>>> finalizing: True for {transfers}") 
self.direct_transfer_model.update_items(transfers) @pyqtSlot(object) diff --git a/nxdrive/gui/view.py b/nxdrive/gui/view.py index 7679c84824..736813a434 100755 --- a/nxdrive/gui/view.py +++ b/nxdrive/gui/view.py @@ -155,7 +155,6 @@ class TransferModel(QAbstractListModel): IS_DIRECT_EDIT = qt.UserRole + 7 FINALIZING = qt.UserRole + 8 PROGRESS_METRICS = qt.UserRole + 9 - # FINALIZING_MSG = qt.UserRole + 10 def __init__(self, translate: Callable, /, *, parent: QObject = None) -> None: super().__init__(parent) @@ -173,7 +172,6 @@ def __init__(self, translate: Callable, /, *, parent: QObject = None) -> None: # The is the Verification step for downloads # and Linking step for uploads. self.FINALIZING: b"finalizing", - # self.FINALIZING_MSG: b"test_val", } def rowCount(self, parent: QModelIndex = QModelIndex(), /) -> int: @@ -232,9 +230,6 @@ def data(self, index: QModelIndex, role: int, /) -> Any: return row.get("finalizing", False) if role == self.PROGRESS_METRICS: return self.get_progress(row) - """if role == self.FINALIZING_MSG: - if row.get("finalizing", False) == True: - return str(datetime.datetime.now())""" return row[self.names[role].decode()] def setData(self, index: QModelIndex, value: Any, /, *, role: int = None) -> None: @@ -259,11 +254,8 @@ def set_progress(self, action: Dict[str, Any], /) -> None: self.setData(idx, action["progress"], role=self.PROGRESS) self.setData(idx, action["progress"], role=self.PROGRESS_METRICS) - log.info(f'>>>>>>> idx {idx}, {action["progress"]}, {self.PROGRESS}') - log.debug(f'>>>>>>> idx {idx}, {action["progress"]}, {self.PROGRESS}') if action["action_type"] in ("Linking", "Verification"): self.setData(idx, True, role=self.FINALIZING) - # self.setData(idx, True, role=self.FINALIZING_MSG) def flags(self, index: QModelIndex, /) -> Qt.ItemFlags: return qt.ItemIsEditable | qt.ItemIsEnabled | qt.ItemIsSelectable @@ -368,51 +360,28 @@ def data(self, index: QModelIndex, role: int, /) -> Any: return self.psize(row["filesize"] * row["progress"] / 100) if role == self.FINALIZING_MSG: a = row.get("transfer_status") - log.debug(f">>>>>>>> time: {a}") return a return row[self.names[role].decode()] def setData(self, index: QModelIndex, value: Any, /, *, role: int = None) -> None: if role is None: return - log.info( - f"************************* inside setData: index: {index!r}, value: {value!r}, role: {role!r}" - ) key = force_decode(self.roleNames()[role]) - log.info(f"************************* inside setData: key: {key!r}") self.items[index.row()][key] = value - log.info( - f"************************* inside setData: self.items: {self.items!r}" - ) - log.info( - f"************************* inside setData: self.dataChanged.emit({index!r}, {index!r}, {[role]!r})" - ) self.dataChanged.emit(index, index, [role]) @pyqtSlot(dict) def set_progress(self, action: Dict[str, Any], /) -> None: - log.info(f"-++++++++++++++++++++++++++++++++ {self.items}") for i, item in enumerate(self.items): - log.info(f"-------------------------- {i},-->>>> {item}") - log.info(f"-------------------------- {action}") if ( item["engine"] != action["engine"] or item["doc_pair"] != action["doc_pair"] ): - log.info("+++++++++++++++++++++++++++++++ continue") continue idx = self.createIndex(i, 0) self.setData(idx, action["progress"], role=self.PROGRESS) self.setData(idx, action["progress"], role=self.TRANSFERRED) if action["action_type"] == "Linking": - import datetime - - current_time = datetime.datetime.now() - log.info("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^") - log.info( - 
f"Finalizing the Upload. Last status received from server at {current_time}" - ) - log.info("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^") self.setData(idx, True, role=self.FINALIZING) self.setData(idx, action["transfer_status"], role=self.FINALIZING_MSG) From d82ddbba557ecead64be9ee9afc2fc149c789f7f Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Tue, 10 Oct 2023 12:15:55 +0530 Subject: [PATCH 06/24] NXDRIVE-2711: Show that upload is still alive for very large files --- nxdrive/client/uploader/__init__.py | 6 ++---- nxdrive/data/i18n/i18n.json | 2 +- nxdrive/data/qml/TransferItem.qml | 2 +- nxdrive/engine/processor.py | 2 -- nxdrive/gui/view.py | 3 --- 5 files changed, 4 insertions(+), 11 deletions(-) diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 0e574bb74a..7e2ba022d6 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -461,13 +461,11 @@ def link_blob_to_doc( # type: ignore[return] return res except Exception as exc: err = f"Error while linking blob to doc: {exc!r}" - # self._set_transfer_status(transfer, TransferStatus.SUSPENDED) - # log.debug(f">>>> error start: {transfer}") - # self.dao.set_transfer_status("upload", transfer) action.transfer_status = "Error" + if "TCPKeepAliveHTTPSConnectionPool" in exc: + raise exc transfer.request_uid = str(uuid4()) self.dao.update_upload_requestid(transfer) - log.debug(f">>>> error start1: {transfer}") log.warning(err) raise exc diff --git a/nxdrive/data/i18n/i18n.json b/nxdrive/data/i18n/i18n.json index 25ab0c70d1..8cd3630806 100644 --- a/nxdrive/data/i18n/i18n.json +++ b/nxdrive/data/i18n/i18n.json @@ -104,7 +104,7 @@ "DIRECT_TRANSFER_CANCEL": "Do you confirm the cancellation of the transfer of \"%1\"?", "DIRECT_TRANSFER_CANCEL_HEADER": "A transfer cancellation has been asked.", "DIRECT_TRANSFER_DETAILS": "[%1%] %2 of %3", - "DIRECT_TRANSFER_FINALIZING_DETAILS": "Upload is in progress. 
Last updated time: %1", + "DIRECT_TRANSFER_FINALIZING_DETAILS": "An error occured during the transfer, it will resume shortly.", "DIRECT_TRANSFER_END": "Transfer done: \"%1\"", "DIRECT_TRANSFER_ERROR": "Transfer error: \"%1\"", "DIRECT_TRANSFER_NO_ACCOUNT": "Cannot use the Direct Transfer feature with no account, aborting.", diff --git a/nxdrive/data/qml/TransferItem.qml b/nxdrive/data/qml/TransferItem.qml index a6049ff124..d9cf653a07 100644 --- a/nxdrive/data/qml/TransferItem.qml +++ b/nxdrive/data/qml/TransferItem.qml @@ -79,7 +79,7 @@ Rectangle { } ScaledText { - text: qsTr("DIRECT_TRANSFER_FINALIZING_DETAILS").arg(transfer_status) + tl.tr + text: qsTr("DIRECT_TRANSFER_FINALIZING_DETAILS") + tl.tr color: secondaryText visible: finalizing && transfer_status Layout.leftMargin: icon.width + 5 diff --git a/nxdrive/engine/processor.py b/nxdrive/engine/processor.py index 54a0581af7..209cdc5fef 100644 --- a/nxdrive/engine/processor.py +++ b/nxdrive/engine/processor.py @@ -365,9 +365,7 @@ def _execute(self) -> None: # Add detection for server unavailability to stop all sync # instead of putting files in error log.debug("Connection issue", exc_info=True) - # self.increase_error(doc_pair, "CONNECTION_ERROR") self._postpone_pair(doc_pair, "CONNECTION_ERROR") - # self.increase_error(doc_pair, "CONNECTION_ERROR", exception=exc) except MaxRetryError: log.warning("Connection retries issue", exc_info=True) self._postpone_pair(doc_pair, "MAX_RETRY_ERROR") diff --git a/nxdrive/gui/view.py b/nxdrive/gui/view.py index 736813a434..ac2673344f 100755 --- a/nxdrive/gui/view.py +++ b/nxdrive/gui/view.py @@ -1,5 +1,4 @@ from functools import partial -from logging import getLogger from typing import TYPE_CHECKING, Any, Callable, Dict, List, Tuple from dateutil.tz import tzlocal @@ -19,8 +18,6 @@ from ..translator import Translator from ..utils import force_decode, get_date_from_sqlite, sizeof_fmt -log = getLogger(__name__) - if TYPE_CHECKING: from .application import Application # noqa from ..engine.engine import Engine # noqa From cf30aaaaf702cd6c8edfd26bfa1418e3d0706e27 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Tue, 10 Oct 2023 12:30:33 +0530 Subject: [PATCH 07/24] NXDRIVE-2711: Show that upload is still alive for very large files --- nxdrive/client/uploader/__init__.py | 8 +++----- nxdrive/dao/engine.py | 3 +-- nxdrive/data/i18n/i18n.json | 2 +- nxdrive/data/qml/TransferItem.qml | 4 ++-- nxdrive/engine/activity.py | 26 +++++++++++++------------- nxdrive/gui/view.py | 12 +++++++----- 6 files changed, 27 insertions(+), 28 deletions(-) diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 7e2ba022d6..2a97fcf250 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -444,14 +444,13 @@ def link_blob_to_doc( # type: ignore[return] engine=transfer.engine, doc_pair=transfer.doc_pair, ) - action.transfer_status = "" + action.finalizing_status = "" action.is_direct_transfer = transfer.is_direct_transfer if "headers" in kwargs: kwargs["headers"].update(headers) else: kwargs["headers"] = headers try: - doc_type = kwargs.get("doc_type", "") if transfer.is_direct_transfer and doc_type and doc_type != "": res = self._transfer_docType_file(transfer, headers, doc_type) @@ -461,13 +460,12 @@ def link_blob_to_doc( # type: ignore[return] return res except Exception as exc: err = f"Error while linking blob to doc: {exc!r}" - action.transfer_status = "Error" + log.warning(err) + action.finalizing_status = "Error" if 
"TCPKeepAliveHTTPSConnectionPool" in exc: raise exc transfer.request_uid = str(uuid4()) self.dao.update_upload_requestid(transfer) - log.warning(err) - raise exc finally: action.finish_action() diff --git a/nxdrive/dao/engine.py b/nxdrive/dao/engine.py index b584332366..3f53145197 100644 --- a/nxdrive/dao/engine.py +++ b/nxdrive/dao/engine.py @@ -2382,8 +2382,7 @@ def update_upload(self, upload: Upload, /) -> None: c.execute(sql, (json.dumps(batch), upload.uid)) def update_upload_requestid(self, upload: Upload, /) -> None: - """Update a upload.""" - + """Incase of error during linking, update request_uid for upload""" c = self._get_write_connection().cursor() sql = "UPDATE Uploads SET request_uid = ? WHERE uid = ?" c.execute(sql, (upload.request_uid, upload.uid)) diff --git a/nxdrive/data/i18n/i18n.json b/nxdrive/data/i18n/i18n.json index 8cd3630806..722dc6a874 100644 --- a/nxdrive/data/i18n/i18n.json +++ b/nxdrive/data/i18n/i18n.json @@ -104,9 +104,9 @@ "DIRECT_TRANSFER_CANCEL": "Do you confirm the cancellation of the transfer of \"%1\"?", "DIRECT_TRANSFER_CANCEL_HEADER": "A transfer cancellation has been asked.", "DIRECT_TRANSFER_DETAILS": "[%1%] %2 of %3", - "DIRECT_TRANSFER_FINALIZING_DETAILS": "An error occured during the transfer, it will resume shortly.", "DIRECT_TRANSFER_END": "Transfer done: \"%1\"", "DIRECT_TRANSFER_ERROR": "Transfer error: \"%1\"", + "DIRECT_TRANSFER_FINALIZING_ERROR": "An error occured during the transfer, it will resume shortly.", "DIRECT_TRANSFER_NO_ACCOUNT": "Cannot use the Direct Transfer feature with no account, aborting.", "DIRECT_TRANSFER_NOT_ALLOWED": "Direct Transfer of \"%1\" is not allowed for synced files.", "DIRECT_TRANSFER_NOT_ENABLED": "The Direct Transfer feature is not enabled.", diff --git a/nxdrive/data/qml/TransferItem.qml b/nxdrive/data/qml/TransferItem.qml index d9cf653a07..cd1d79fafe 100644 --- a/nxdrive/data/qml/TransferItem.qml +++ b/nxdrive/data/qml/TransferItem.qml @@ -79,9 +79,9 @@ Rectangle { } ScaledText { - text: qsTr("DIRECT_TRANSFER_FINALIZING_DETAILS") + tl.tr + text: qsTr("DIRECT_TRANSFER_FINALIZING_ERROR") + tl.tr color: secondaryText - visible: finalizing && transfer_status + visible: finalizing && finalizing_status Layout.leftMargin: icon.width + 5 font.pointSize: point_size * 0.8 } diff --git a/nxdrive/engine/activity.py b/nxdrive/engine/activity.py index 8956486c17..73572e55fd 100644 --- a/nxdrive/engine/activity.py +++ b/nxdrive/engine/activity.py @@ -20,13 +20,13 @@ class Action(QObject): actions: Dict[int, Optional["Action"]] = {} def __init__( - self, action_type: str, /, *, progress: float = 0.0, transfer_status: str = "" + self, action_type: str, /, *, progress: float = 0.0, finalizing_status: str = "" ) -> None: super().__init__() self.type = action_type self._progress = progress - self._transfer_status = transfer_status + self._finalizing_status = finalizing_status self.size = 0 self.uid = str(uuid.uuid4()) @@ -47,12 +47,12 @@ def get_percent(self) -> float: return self.progress @property - def transfer_status(self) -> str: - return self._transfer_status + def finalizing_status(self) -> str: + return self._finalizing_status - @transfer_status.setter - def transfer_status(self, value: str, /) -> None: - self._transfer_status = value + @finalizing_status.setter + def finalizing_status(self, value: str, /) -> None: + self._finalizing_status = value @staticmethod def get_actions() -> Dict[int, Optional["Action"]]: @@ -77,7 +77,7 @@ def export(self) -> Dict[str, Any]: "uid": self.uid, "action_type": self.type, 
"progress": self.get_percent(), - "transfer_status": self.transfer_status, + "finalizing_status": self.finalizing_status, } def __repr__(self) -> str: @@ -162,12 +162,12 @@ def progress(self, value: float, /) -> None: self.progressing.emit(self) @property - def transfer_status(self) -> str: - return self._transfer_status + def finalizing_status(self) -> str: + return self._finalizing_status - @transfer_status.setter - def transfer_status(self, value: str, /) -> None: - self._transfer_status = value + @finalizing_status.setter + def finalizing_status(self, value: str, /) -> None: + self._finalizing_status = value self.progressing.emit(self) def get_percent(self) -> float: diff --git a/nxdrive/gui/view.py b/nxdrive/gui/view.py index ac2673344f..d4c5664612 100755 --- a/nxdrive/gui/view.py +++ b/nxdrive/gui/view.py @@ -273,7 +273,7 @@ class DirectTransferModel(QAbstractListModel): REMOTE_PARENT_REF = qt.UserRole + 10 SHADOW = qt.UserRole + 11 # Tell the interface if the row should be visible or not DOC_PAIR = qt.UserRole + 12 - FINALIZING_MSG = qt.UserRole + 13 + FINALIZING_STATUS = qt.UserRole + 13 def __init__(self, translate: Callable, /, *, parent: QObject = None) -> None: super().__init__(parent) @@ -292,7 +292,7 @@ def __init__(self, translate: Callable, /, *, parent: QObject = None) -> None: self.REMOTE_PARENT_REF: b"remote_parent_ref", self.SHADOW: b"shadow", self.DOC_PAIR: b"doc_pair", - self.FINALIZING_MSG: b"transfer_status", + self.FINALIZING_STATUS: b"finalizing_status", } # Pretty print self.psize = partial(sizeof_fmt, suffix=self.tr("BYTE_ABBREV")) @@ -355,8 +355,8 @@ def data(self, index: QModelIndex, role: int, /) -> Any: return self.psize(row["filesize"]) if role == self.TRANSFERRED: return self.psize(row["filesize"] * row["progress"] / 100) - if role == self.FINALIZING_MSG: - a = row.get("transfer_status") + if role == self.FINALIZING_STATUS: + a = row.get("finalizing_status") return a return row[self.names[role].decode()] @@ -380,7 +380,9 @@ def set_progress(self, action: Dict[str, Any], /) -> None: self.setData(idx, action["progress"], role=self.TRANSFERRED) if action["action_type"] == "Linking": self.setData(idx, True, role=self.FINALIZING) - self.setData(idx, action["transfer_status"], role=self.FINALIZING_MSG) + self.setData( + idx, action["finalizing_status"], role=self.FINALIZING_STATUS + ) def add_item(self, parent: QModelIndex, n_item: Dict[str, Any], /) -> None: """Add an item to existing list.""" From 9664a78a868a585969f260bd6f72f3bb50488f37 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Tue, 10 Oct 2023 12:37:17 +0530 Subject: [PATCH 08/24] NXDRIVE-2711: Show that upload is still alive for very large files --- docs/changes/5.3.3.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changes/5.3.3.md b/docs/changes/5.3.3.md index 7b3b41a58e..ba1cc58f8b 100644 --- a/docs/changes/5.3.3.md +++ b/docs/changes/5.3.3.md @@ -12,7 +12,7 @@ Release date: `2023-xx-xx` ### Direct Transfer -- [NXDRIVE-2](https://jira.nuxeo.com/browse/NXDRIVE-2): +- [NXDRIVE-2711](https://jira.nuxeo.com/browse/NXDRIVE-2711): Show that upload is still alive for very large files ## GUI From 70481e677ffef555bf5cfab32c78b87a37962847 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Wed, 11 Oct 2023 12:46:50 +0530 Subject: [PATCH 09/24] NXDRIVE-2711: Show that upload is still alive for very large files --- nxdrive/client/uploader/__init__.py | 4 ++-- tests/unit/test_utils.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git 
a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 2a97fcf250..c18df802d2 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -416,7 +416,7 @@ def _link_blob_to_doc( self._set_transfer_status(transfer, TransferStatus.ONGOING) raise exc - def link_blob_to_doc( # type: ignore[return] + def link_blob_to_doc( self, command: str, transfer: Upload, @@ -462,7 +462,7 @@ def link_blob_to_doc( # type: ignore[return] err = f"Error while linking blob to doc: {exc!r}" log.warning(err) action.finalizing_status = "Error" - if "TCPKeepAliveHTTPSConnectionPool" in exc: + if "TCPKeepAliveHTTPSConnectionPool" in str(exc): raise exc transfer.request_uid = str(uuid4()) self.dao.update_upload_requestid(transfer) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 2e9133e5ca..5c0d96cc95 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -413,7 +413,7 @@ def test_request_verify_ca_bundle_file(caplog, tmp_path): # Save the certificate for the first time caplog.clear() cert = nxdrive.utils.requests_verify(ca_bundle, False) - path = "" if type(cert) == bool else cert + path = "" if isinstance(bool, type(cert)) else cert final_certificate = Path(path) records = [line.message for line in caplog.records] assert len(records) == 3 @@ -451,7 +451,7 @@ def test_request_verify_ca_bundle_file_is_str(caplog, tmp_path): # Save the certificate for the first time caplog.clear() cert = nxdrive.utils.requests_verify(ca_bundle, False) - path = "" if type(cert) == bool else cert + path = "" if isinstance(bool, type(cert)) else cert final_certificate = Path(path) records = [line.message for line in caplog.records] assert len(records) == 3 @@ -494,7 +494,7 @@ def test_request_verify_ca_bundle_file_mimic_updates(caplog, tmp_path): # Save the certificate for the first time caplog.clear() cert = nxdrive.utils.requests_verify(ca_bundle, False) - path = "" if type(cert) == bool else cert + path = "" if isinstance(bool, type(cert)) else cert final_certificate_1 = Path(path) records = [line.message for line in caplog.records] assert len(records) == 3 From 7524f020f85522b619ac1bc42c4c8722314ceba6 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Wed, 11 Oct 2023 13:10:05 +0530 Subject: [PATCH 10/24] NXDRIVE-2711: Show that upload is still alive for very large files --- nxdrive/dao/engine.py | 2 +- nxdrive/data/i18n/i18n.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nxdrive/dao/engine.py b/nxdrive/dao/engine.py index 3f53145197..dc6c950d67 100644 --- a/nxdrive/dao/engine.py +++ b/nxdrive/dao/engine.py @@ -2382,7 +2382,7 @@ def update_upload(self, upload: Upload, /) -> None: c.execute(sql, (json.dumps(batch), upload.uid)) def update_upload_requestid(self, upload: Upload, /) -> None: - """Incase of error during linking, update request_uid for upload""" + """In case of error during linking, update request_uid for upload""" c = self._get_write_connection().cursor() sql = "UPDATE Uploads SET request_uid = ? WHERE uid = ?" 
c.execute(sql, (upload.request_uid, upload.uid)) diff --git a/nxdrive/data/i18n/i18n.json b/nxdrive/data/i18n/i18n.json index 722dc6a874..69a6120b0e 100644 --- a/nxdrive/data/i18n/i18n.json +++ b/nxdrive/data/i18n/i18n.json @@ -106,7 +106,7 @@ "DIRECT_TRANSFER_DETAILS": "[%1%] %2 of %3", "DIRECT_TRANSFER_END": "Transfer done: \"%1\"", "DIRECT_TRANSFER_ERROR": "Transfer error: \"%1\"", - "DIRECT_TRANSFER_FINALIZING_ERROR": "An error occured during the transfer, it will resume shortly.", + "DIRECT_TRANSFER_FINALIZING_ERROR": "An error occurred during the transfer, it will resume shortly.", "DIRECT_TRANSFER_NO_ACCOUNT": "Cannot use the Direct Transfer feature with no account, aborting.", "DIRECT_TRANSFER_NOT_ALLOWED": "Direct Transfer of \"%1\" is not allowed for synced files.", "DIRECT_TRANSFER_NOT_ENABLED": "The Direct Transfer feature is not enabled.", From bd016cdee0a8c6132ae798e773f14b07a3592d9f Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Thu, 12 Oct 2023 12:34:18 +0530 Subject: [PATCH 11/24] NXDRIVE-2711: Show that upload is still alive for very large files --- nxdrive/dao/engine.py | 7 ++++--- tests/unit/conftest.py | 33 +++++++++++++++++++++++++++++- tests/unit/test_client_uploader.py | 16 +++++++++++++++ tests/unit/test_engine_dao.py | 21 ++++++++++++++++++- 4 files changed, 72 insertions(+), 5 deletions(-) create mode 100644 tests/unit/test_client_uploader.py diff --git a/nxdrive/dao/engine.py b/nxdrive/dao/engine.py index dc6c950d67..9f52a18d90 100644 --- a/nxdrive/dao/engine.py +++ b/nxdrive/dao/engine.py @@ -2383,9 +2383,10 @@ def update_upload(self, upload: Upload, /) -> None: def update_upload_requestid(self, upload: Upload, /) -> None: """In case of error during linking, update request_uid for upload""" - c = self._get_write_connection().cursor() - sql = "UPDATE Uploads SET request_uid = ? WHERE uid = ?" - c.execute(sql, (upload.request_uid, upload.uid)) + with self.lock: + c = self._get_write_connection().cursor() + sql = "UPDATE Uploads SET request_uid = ? WHERE uid = ?" 
+ c.execute(sql, (upload.request_uid, upload.uid)) def pause_transfer( self, diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 44a037bf51..77bfde1fdc 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -7,12 +7,14 @@ import pytest from nxdrive.client.remote_client import Remote +from nxdrive.client.uploader import BaseUploader +from nxdrive.constants import TransferStatus from nxdrive.dao.engine import EngineDAO from nxdrive.dao.manager import ManagerDAO from nxdrive.engine.engine import Engine from nxdrive.engine.processor import Processor from nxdrive.manager import Manager -from nxdrive.objects import DocPair +from nxdrive.objects import DocPair, Upload from nxdrive.osi import AbstractOSIntegration from nxdrive.updater.darwin import Updater from nxdrive.utils import normalized_path @@ -141,6 +143,11 @@ def __init__(self, tmp_path): super().__init__(self, final_app) +class MockUploader(BaseUploader): + def __init__(self, Remote): + super().__init__(self, Remote) + + @pytest.fixture() def engine_dao(tmp_path): dao = MockEngineDAO @@ -188,3 +195,27 @@ def processor(engine, engine_dao): processor.remote = Remote processor.dao = engine_dao return processor + + +@pytest.fixture() +def baseuploader(): + baseuploader = MockUploader + return baseuploader + + +@pytest.fixture() +def upload(): + upload = Upload + upload.path = "/tmp" + upload.status = TransferStatus.ONGOING + upload.engine = f"{engine}" + upload.is_direct_edit = False + upload.is_direct_transfer = True + upload.filesize = "23.0" + upload.batch = {"batchID": f"{str(uuid4())}"} + upload.chunk_size = "345" + upload.remote_parent_path = "/tmp/remote_path" + upload.remote_parent_ref = "/tmp/remote_path_ref" + upload.doc_pair = "test_file" + upload.request_uid = str(uuid4()) + return upload diff --git a/tests/unit/test_client_uploader.py b/tests/unit/test_client_uploader.py new file mode 100644 index 0000000000..10a91a83ed --- /dev/null +++ b/tests/unit/test_client_uploader.py @@ -0,0 +1,16 @@ +from unittest.mock import Mock +from uuid import uuid4 + +from nuxeo.models import FileBlob + + +def test_link_blob_to_doc(baseuploader, upload, tmp_path): + file = tmp_path / f"{uuid4()}.txt" + file.write_bytes(b"content") + + baseuploader.dao = Mock() + baseuploader._transfer_autoType_file = Mock() + + baseuploader.link_blob_to_doc( + baseuploader, "Filemanager.Import", upload, FileBlob(str(file)), True + ) diff --git a/tests/unit/test_engine_dao.py b/tests/unit/test_engine_dao.py index 9cba64f50a..1c7f62caa6 100644 --- a/tests/unit/test_engine_dao.py +++ b/tests/unit/test_engine_dao.py @@ -1,8 +1,9 @@ import os import sqlite3 from datetime import datetime +from multiprocessing import RLock from pathlib import Path -from unittest.mock import patch +from unittest.mock import Mock, patch from uuid import uuid4 from nxdrive.constants import TransferStatus @@ -608,3 +609,21 @@ def test_migration_interface(): assert not interface.downgrade(cursor) assert not interface.previous_version assert not interface.version + + +def test_update_upload_requestid(engine_dao, upload): + """Test to save upload and update reuqest_uid of existing row""" + engine_dao.lock = RLock() + with engine_dao("engine_migration_18.db") as dao: + engine_dao.directTransferUpdated = Mock() + # Save New upload + engine_dao.save_upload(dao, upload) + + assert upload.uid + + previous_request_id = upload.request_uid + upload.request_uid = str(uuid4()) + # Update request_uid of existing record + engine_dao.update_upload_requestid(dao, upload) + + assert 
previous_request_id != upload.request_uid From 2968e81db0645cee8f7ab64fba70095374db928f Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Thu, 12 Oct 2023 16:36:40 +0530 Subject: [PATCH 12/24] NXDRIVE-2711: Show that upload is still alive for very large files --- tests/unit/test_action.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/unit/test_action.py b/tests/unit/test_action.py index fbc7d1ecdd..68c125672a 100644 --- a/tests/unit/test_action.py +++ b/tests/unit/test_action.py @@ -34,6 +34,12 @@ def test_action(): action.progress = 100.0 details = action.export() assert details["progress"] == 100.0 + Action.finish_action() + + # Test finalizing_status property setter + action.finalizing_status = "Finalize the upload" + details = action.export() + assert details["finalizing_status"] == "Finalize the upload" Action.finish_action() actions = Action.get_actions() From 7a400a76c82c45ca685646aa86685f038415ee78 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Tue, 17 Oct 2023 12:17:31 +0530 Subject: [PATCH 13/24] NXDRIVE-2711: Show that upload is still alive for very large files --- nxdrive/gui/view.py | 3 +-- tests/functional/test_view.py | 10 --------- tests/unit/conftest.py | 34 +++++++++++++++++++++++++++++- tests/unit/test_client_uploader.py | 1 + tests/unit/test_view.py | 27 ++++++++++++++++++++++++ 5 files changed, 62 insertions(+), 13 deletions(-) delete mode 100644 tests/functional/test_view.py create mode 100644 tests/unit/test_view.py diff --git a/nxdrive/gui/view.py b/nxdrive/gui/view.py index d4c5664612..b0fdbceec9 100755 --- a/nxdrive/gui/view.py +++ b/nxdrive/gui/view.py @@ -356,8 +356,7 @@ def data(self, index: QModelIndex, role: int, /) -> Any: if role == self.TRANSFERRED: return self.psize(row["filesize"] * row["progress"] / 100) if role == self.FINALIZING_STATUS: - a = row.get("finalizing_status") - return a + return row.get("finalizing_status") return row[self.names[role].decode()] def setData(self, index: QModelIndex, value: Any, /, *, role: int = None) -> None: diff --git a/tests/functional/test_view.py b/tests/functional/test_view.py deleted file mode 100644 index 8ac1897b1f..0000000000 --- a/tests/functional/test_view.py +++ /dev/null @@ -1,10 +0,0 @@ -from nxdrive.gui.view import FileModel - - -def test_foldersDialog(): - def func(): - return True - - file_model = FileModel(func) - returned_val = file_model.add_files([{"key": "val"}]) - assert not returned_val diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 77bfde1fdc..84cf8689c7 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,7 +1,7 @@ import os import shutil import time -from typing import Optional +from typing import Any, Callable, Optional from uuid import uuid4 import pytest @@ -13,9 +13,12 @@ from nxdrive.dao.manager import ManagerDAO from nxdrive.engine.engine import Engine from nxdrive.engine.processor import Processor +from nxdrive.gui.view import DirectTransferModel from nxdrive.manager import Manager from nxdrive.objects import DocPair, Upload from nxdrive.osi import AbstractOSIntegration +from nxdrive.qt import constants as qt +from nxdrive.qt.imports import QObject from nxdrive.updater.darwin import Updater from nxdrive.utils import normalized_path @@ -148,6 +151,13 @@ def __init__(self, Remote): super().__init__(self, Remote) +class MockDirectTransferModel(DirectTransferModel): + def __init__( + self, translate: Callable[..., Any], /, *, parent: QObject = None + ) -> None: + super().__init__(translate, parent=parent) + + @pytest.fixture() def 
engine_dao(tmp_path): dao = MockEngineDAO @@ -219,3 +229,25 @@ def upload(): upload.doc_pair = "test_file" upload.request_uid = str(uuid4()) return upload + + +@pytest.fixture() +def direct_transfer_model(): + direct_transfer_model = MockDirectTransferModel + direct_transfer_model.FINALIZING_STATUS = qt.UserRole + 13 + direct_transfer_model.items = [ + { + "uid": 1, + "name": "a.txt", + "filesize": 142936511610, + "status": "", + "engine": "51a2c2dc641311ee87fb...bfc0ec09fa", + "progress": 100.0, + "doc_pair": 1, + "remote_parent_path": "/default-domain/User...TestFolder", + "remote_parent_ref": "7b7886ea-5ad9-460d-8...1607ea0081", + "shadow": True, + "finalizing": True, + } + ] + return direct_transfer_model diff --git a/tests/unit/test_client_uploader.py b/tests/unit/test_client_uploader.py index 10a91a83ed..f3ce8194ee 100644 --- a/tests/unit/test_client_uploader.py +++ b/tests/unit/test_client_uploader.py @@ -5,6 +5,7 @@ def test_link_blob_to_doc(baseuploader, upload, tmp_path): + """Test link blob to document functionality""" file = tmp_path / f"{uuid4()}.txt" file.write_bytes(b"content") diff --git a/tests/unit/test_view.py b/tests/unit/test_view.py new file mode 100644 index 0000000000..27b02cb39b --- /dev/null +++ b/tests/unit/test_view.py @@ -0,0 +1,27 @@ +from unittest.mock import Mock + +from nxdrive.gui.view import FileModel + + +def test_foldersDialog(): + def func(): + return True + + file_model = FileModel(func) + returned_val = file_model.add_files([{"key": "val"}]) + assert not returned_val + + +def test_set_progress(direct_transfer_model): + """Test the finalize state after 100% progress""" + action = { + "engine": "51a2c2dc641311ee87fb...bfc0ec09fa", + "doc_pair": 1, + "progress": "100", + "action_type": "Linking", + "finalizing_status": "Finalize the status", + } + + direct_transfer_model.createIndex = Mock(return_value=1) + direct_transfer_model.setData = Mock() + direct_transfer_model.set_progress(direct_transfer_model, action) From 900dab5acf18fc208aca9e1115412e3bd6541e71 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Tue, 17 Oct 2023 17:21:51 +0530 Subject: [PATCH 14/24] NXDRIVE-2711: Show that upload is still alive for very large files --- tests/unit/test_view.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/unit/test_view.py b/tests/unit/test_view.py index 27b02cb39b..f5d78674fe 100644 --- a/tests/unit/test_view.py +++ b/tests/unit/test_view.py @@ -1,6 +1,7 @@ from unittest.mock import Mock from nxdrive.gui.view import FileModel +from nxdrive.qt.imports import QModelIndex def test_foldersDialog(): @@ -25,3 +26,12 @@ def test_set_progress(direct_transfer_model): direct_transfer_model.createIndex = Mock(return_value=1) direct_transfer_model.setData = Mock() direct_transfer_model.set_progress(direct_transfer_model, action) + + +def test_data(direct_transfer_model): + """Test get row data as per role""" + index = QModelIndex + index.row = Mock(return_value=0) + direct_transfer_model.data( + direct_transfer_model, index, direct_transfer_model.FINALIZING_STATUS + ) From 643f31786a9c80cdfda04c882fd68d60fc0e7a18 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Wed, 18 Oct 2023 18:01:53 +0530 Subject: [PATCH 15/24] NXDRIVE-2711: update testcase --- tests/unit/conftest.py | 12 -------- tests/unit/test_client_uploader.py | 46 ++++++++++++++++++++++++++---- 2 files changed, 40 insertions(+), 18 deletions(-) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 84cf8689c7..5ee49abf9f 100644 --- a/tests/unit/conftest.py +++ 
b/tests/unit/conftest.py @@ -7,7 +7,6 @@ import pytest from nxdrive.client.remote_client import Remote -from nxdrive.client.uploader import BaseUploader from nxdrive.constants import TransferStatus from nxdrive.dao.engine import EngineDAO from nxdrive.dao.manager import ManagerDAO @@ -146,11 +145,6 @@ def __init__(self, tmp_path): super().__init__(self, final_app) -class MockUploader(BaseUploader): - def __init__(self, Remote): - super().__init__(self, Remote) - - class MockDirectTransferModel(DirectTransferModel): def __init__( self, translate: Callable[..., Any], /, *, parent: QObject = None @@ -207,12 +201,6 @@ def processor(engine, engine_dao): return processor -@pytest.fixture() -def baseuploader(): - baseuploader = MockUploader - return baseuploader - - @pytest.fixture() def upload(): upload = Upload diff --git a/tests/unit/test_client_uploader.py b/tests/unit/test_client_uploader.py index f3ce8194ee..e8ef8ce23a 100644 --- a/tests/unit/test_client_uploader.py +++ b/tests/unit/test_client_uploader.py @@ -1,17 +1,51 @@ from unittest.mock import Mock from uuid import uuid4 +import pytest +import requests from nuxeo.models import FileBlob +from nxdrive.client.remote_client import Remote +from nxdrive.client.uploader import BaseUploader -def test_link_blob_to_doc(baseuploader, upload, tmp_path): - """Test link blob to document functionality""" + +@pytest.fixture +def baseuploader(): + remote = Remote + remote.dao = Mock() + baseuploader = BaseUploader(remote) + return baseuploader + + +def test_link_blob_to_doc(baseuploader, upload, tmp_path, monkeypatch): + """Test system network and server side exception handling while linking blob to document""" file = tmp_path / f"{uuid4()}.txt" file.write_bytes(b"content") - baseuploader.dao = Mock() - baseuploader._transfer_autoType_file = Mock() + def mock_transfer_autoType_file(*args, **kwargs): + raise requests.exceptions.RequestException("Connection Error") + + monkeypatch.setattr( + baseuploader, "_transfer_autoType_file", mock_transfer_autoType_file + ) + + # server side exceptions + with pytest.raises(requests.exceptions.RequestException): + baseuploader.link_blob_to_doc( + "Filemanager.Import", upload, FileBlob(str(file)), False + ) - baseuploader.link_blob_to_doc( - baseuploader, "Filemanager.Import", upload, FileBlob(str(file)), True + def mock_transfer_autoType_file(*args, **kwargs): + raise requests.exceptions.RequestException( + "TCPKeepAliveHTTPSConnectionPool: Connection Error" + ) + + monkeypatch.setattr( + baseuploader, "_transfer_autoType_file", mock_transfer_autoType_file ) + + # system network disconnect + with pytest.raises(requests.exceptions.RequestException): + baseuploader.link_blob_to_doc( + "Filemanager.Import", upload, FileBlob(str(file)), False + ) From 894e6a6ac8f89adcfebcb9c6c02b51a7e1b87624 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Wed, 25 Oct 2023 17:11:26 +0530 Subject: [PATCH 16/24] NXDRIVE-2711:Show that upload is still alive for very large files --- docs/changes/5.3.3.md | 3 ++- nxdrive/client/uploader/__init__.py | 8 +++----- nxdrive/engine/activity.py | 20 +++++++------------- 3 files changed, 12 insertions(+), 19 deletions(-) diff --git a/docs/changes/5.3.3.md b/docs/changes/5.3.3.md index b3c540769e..ff96b8fdfc 100644 --- a/docs/changes/5.3.3.md +++ b/docs/changes/5.3.3.md @@ -83,6 +83,7 @@ Release date: `2023-xx-xx` - Upgraded `typing-extensions` from 4.0.1 to 4.7.1 - Upgraded `vulture` from 2.3 to 2.9.1 - Upgraded `wcwidth` from 0.2.5 to 0.2.6 + ## Technical Changes -- +- Added `finalizing_status` 
attribute in LinkingAction class diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index c18df802d2..50472802e9 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -444,7 +444,6 @@ def link_blob_to_doc( engine=transfer.engine, doc_pair=transfer.doc_pair, ) - action.finalizing_status = "" action.is_direct_transfer = transfer.is_direct_transfer if "headers" in kwargs: kwargs["headers"].update(headers) @@ -462,10 +461,9 @@ def link_blob_to_doc( err = f"Error while linking blob to doc: {exc!r}" log.warning(err) action.finalizing_status = "Error" - if "TCPKeepAliveHTTPSConnectionPool" in str(exc): - raise exc - transfer.request_uid = str(uuid4()) - self.dao.update_upload_requestid(transfer) + if "TCPKeepAliveHTTPSConnectionPool" not in str(exc): + transfer.request_uid = str(uuid4()) + self.dao.update_upload_requestid(transfer) raise exc finally: action.finish_action() diff --git a/nxdrive/engine/activity.py b/nxdrive/engine/activity.py index 73572e55fd..c4436469ff 100644 --- a/nxdrive/engine/activity.py +++ b/nxdrive/engine/activity.py @@ -19,14 +19,11 @@ class Action(QObject): actions: Dict[int, Optional["Action"]] = {} - def __init__( - self, action_type: str, /, *, progress: float = 0.0, finalizing_status: str = "" - ) -> None: + def __init__(self, action_type: str, /, *, progress: float = 0.0) -> None: super().__init__() self.type = action_type self._progress = progress - self._finalizing_status = finalizing_status self.size = 0 self.uid = str(uuid.uuid4()) @@ -46,14 +43,6 @@ def progress(self, value: float, /) -> None: def get_percent(self) -> float: return self.progress - @property - def finalizing_status(self) -> str: - return self._finalizing_status - - @finalizing_status.setter - def finalizing_status(self, value: str, /) -> None: - self._finalizing_status = value - @staticmethod def get_actions() -> Dict[int, Optional["Action"]]: return Action.actions.copy() @@ -77,7 +66,6 @@ def export(self) -> Dict[str, Any]: "uid": self.uid, "action_type": self.type, "progress": self.get_percent(), - "finalizing_status": self.finalizing_status, } def __repr__(self) -> str: @@ -279,6 +267,12 @@ def __init__( ) self.progress = size + def export(self) -> Dict[str, Any]: + return { + **super().export(), + "finalizing_status": self.finalizing_status, + } + def tooltip(doing: str): # type: ignore def action_decorator(func): # type: ignore From 3fda071821888bb7ca230d4ef4928849e7a8c6a0 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Wed, 25 Oct 2023 17:21:11 +0530 Subject: [PATCH 17/24] NXDRIVE-2711:Show that upload is still alive for very large files --- tests/unit/test_action.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/unit/test_action.py b/tests/unit/test_action.py index 68c125672a..fbc7d1ecdd 100644 --- a/tests/unit/test_action.py +++ b/tests/unit/test_action.py @@ -34,12 +34,6 @@ def test_action(): action.progress = 100.0 details = action.export() assert details["progress"] == 100.0 - Action.finish_action() - - # Test finalizing_status property setter - action.finalizing_status = "Finalize the upload" - details = action.export() - assert details["finalizing_status"] == "Finalize the upload" Action.finish_action() actions = Action.get_actions() From 2de2bf4d69fb98b391947d0b5e3e337b86135877 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Wed, 25 Oct 2023 18:10:50 +0530 Subject: [PATCH 18/24] NXDRIVE-2711:Show that upload is still alive for very large files --- tests/unit/test_action.py | 3 +++ 1 file 
changed, 3 insertions(+) diff --git a/tests/unit/test_action.py b/tests/unit/test_action.py index fbc7d1ecdd..efe8604722 100644 --- a/tests/unit/test_action.py +++ b/tests/unit/test_action.py @@ -244,6 +244,9 @@ def test_finalization_action(tmp): action = LinkingAction(filepath, filepath.stat().st_size) assert action.type == "Linking" + action.finalizing_status = "Error occured while linking" + details = action.export() + assert details["finalizing_status"] == "Error occured while linking" Action.finish_action() assert action.finished From 1c921c44ac3a4f91085cd3526c12ea66e56427a0 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Wed, 25 Oct 2023 18:12:53 +0530 Subject: [PATCH 19/24] NXDRIVE-2711:Show that upload is still alive for very large files --- tests/unit/test_action.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_action.py b/tests/unit/test_action.py index efe8604722..1bff8b34bc 100644 --- a/tests/unit/test_action.py +++ b/tests/unit/test_action.py @@ -244,9 +244,9 @@ def test_finalization_action(tmp): action = LinkingAction(filepath, filepath.stat().st_size) assert action.type == "Linking" - action.finalizing_status = "Error occured while linking" + action.finalizing_status = "Error occurred while linking" details = action.export() - assert details["finalizing_status"] == "Error occured while linking" + assert details["finalizing_status"] == "Error occurred while linking" Action.finish_action() assert action.finished From 7cb3f277294e43845fe54ebf587b364760e3d077 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Thu, 26 Oct 2023 12:10:10 +0530 Subject: [PATCH 20/24] NXDRIVE-2711:Show that upload is still alive for very large files --- nxdrive/engine/activity.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nxdrive/engine/activity.py b/nxdrive/engine/activity.py index c4436469ff..c22a70900a 100644 --- a/nxdrive/engine/activity.py +++ b/nxdrive/engine/activity.py @@ -266,6 +266,7 @@ def __init__( doc_pair=doc_pair, ) self.progress = size + self.finalizing_status = "" def export(self) -> Dict[str, Any]: return { From e8501e3ba31f6f2180ed47096945cca1ce5bb855 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Thu, 26 Oct 2023 14:19:08 +0530 Subject: [PATCH 21/24] NXDRIVE-2711:Show that upload is still alive for very large files --- tests/unit/test_report.py | 3 ++- tests/unit/test_session_csv.py | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_report.py b/tests/unit/test_report.py index 446434a9ba..221c08e096 100644 --- a/tests/unit/test_report.py +++ b/tests/unit/test_report.py @@ -1,12 +1,13 @@ from logging import getLogger from nxdrive.manager import Manager +from nxdrive.options import Options from nxdrive.report import Report def test_logs(tmp): log = getLogger(__name__) - + print(f">>>>> {Options.update_site_url}") with Manager(tmp()) as manager: log.info("Strange encoding \xe8 \xe9") diff --git a/tests/unit/test_session_csv.py b/tests/unit/test_session_csv.py index 2859bedd82..83d52c05b9 100644 --- a/tests/unit/test_session_csv.py +++ b/tests/unit/test_session_csv.py @@ -1,6 +1,7 @@ from nxdrive.constants import TransferStatus from nxdrive.manager import Manager from nxdrive.objects import Session +from nxdrive.options import Options from nxdrive.session_csv import SessionCsv @@ -18,6 +19,7 @@ def test_csv_generation(tmp): description="icons-svg (+9)", planned_items=10, ) + print(f">>>>> {Options.update_site_url}") with Manager(tmp()) as manager: session_csv = SessionCsv(manager, session) From 
ae9a1a809e87f6b2ef602338dd432cad6f553d62 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Thu, 26 Oct 2023 14:37:22 +0530 Subject: [PATCH 22/24] NXDRIVE-2711:Show that upload is still alive for very large files --- tests/unit/test_report.py | 2 -- tests/unit/test_session_csv.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/tests/unit/test_report.py b/tests/unit/test_report.py index 221c08e096..9edde22eaa 100644 --- a/tests/unit/test_report.py +++ b/tests/unit/test_report.py @@ -1,13 +1,11 @@ from logging import getLogger from nxdrive.manager import Manager -from nxdrive.options import Options from nxdrive.report import Report def test_logs(tmp): log = getLogger(__name__) - print(f">>>>> {Options.update_site_url}") with Manager(tmp()) as manager: log.info("Strange encoding \xe8 \xe9") diff --git a/tests/unit/test_session_csv.py b/tests/unit/test_session_csv.py index 83d52c05b9..2859bedd82 100644 --- a/tests/unit/test_session_csv.py +++ b/tests/unit/test_session_csv.py @@ -1,7 +1,6 @@ from nxdrive.constants import TransferStatus from nxdrive.manager import Manager from nxdrive.objects import Session -from nxdrive.options import Options from nxdrive.session_csv import SessionCsv @@ -19,7 +18,6 @@ def test_csv_generation(tmp): description="icons-svg (+9)", planned_items=10, ) - print(f">>>>> {Options.update_site_url}") with Manager(tmp()) as manager: session_csv = SessionCsv(manager, session) From 345078cad0aeab045c604b40d45f5bec31cd2cbf Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Thu, 26 Oct 2023 15:12:10 +0530 Subject: [PATCH 23/24] NXDRIVE-2711:Show that upload is still alive for very large files --- tests/unit/test_report.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit/test_report.py b/tests/unit/test_report.py index 9edde22eaa..446434a9ba 100644 --- a/tests/unit/test_report.py +++ b/tests/unit/test_report.py @@ -6,6 +6,7 @@ def test_logs(tmp): log = getLogger(__name__) + with Manager(tmp()) as manager: log.info("Strange encoding \xe8 \xe9") From c21d4f14b0cfec0de523dc463ee3a61c89ae7cb7 Mon Sep 17 00:00:00 2001 From: swetayadav1 Date: Thu, 2 Nov 2023 14:41:02 +0530 Subject: [PATCH 24/24] NXDRIVE-2711: Sourcery refactored --- nxdrive/client/uploader/__init__.py | 11 +++++------ nxdrive/engine/activity.py | 3 +-- tests/unit/test_client_uploader.py | 3 +-- tests/unit/test_engine_dao.py | 2 +- 4 files changed, 8 insertions(+), 11 deletions(-) diff --git a/nxdrive/client/uploader/__init__.py b/nxdrive/client/uploader/__init__.py index 50472802e9..601d90db8c 100644 --- a/nxdrive/client/uploader/__init__.py +++ b/nxdrive/client/uploader/__init__.py @@ -451,12 +451,11 @@ def link_blob_to_doc( kwargs["headers"] = headers try: doc_type = kwargs.get("doc_type", "") - if transfer.is_direct_transfer and doc_type and doc_type != "": - res = self._transfer_docType_file(transfer, headers, doc_type) - else: - res = self._transfer_autoType_file(command, blob, kwargs) - - return res + return ( + self._transfer_docType_file(transfer, headers, doc_type) + if transfer.is_direct_transfer and doc_type and doc_type != "" + else self._transfer_autoType_file(command, blob, kwargs) + ) except Exception as exc: err = f"Error while linking blob to doc: {exc!r}" log.warning(err) diff --git a/nxdrive/engine/activity.py b/nxdrive/engine/activity.py index c22a70900a..b8713cefc3 100644 --- a/nxdrive/engine/activity.py +++ b/nxdrive/engine/activity.py @@ -54,8 +54,7 @@ def get_current_action(*, thread_id: int = None) -> Optional["Action"]: @staticmethod def finish_action() -> None: - action = 
Action.actions.pop(current_thread_id(), None) - if action: + if action := Action.actions.pop(current_thread_id(), None): action.finish() def finish(self) -> None: diff --git a/tests/unit/test_client_uploader.py b/tests/unit/test_client_uploader.py index e8ef8ce23a..f1a2dff404 100644 --- a/tests/unit/test_client_uploader.py +++ b/tests/unit/test_client_uploader.py @@ -13,8 +13,7 @@ def baseuploader(): remote = Remote remote.dao = Mock() - baseuploader = BaseUploader(remote) - return baseuploader + return BaseUploader(remote) def test_link_blob_to_doc(baseuploader, upload, tmp_path, monkeypatch): diff --git a/tests/unit/test_engine_dao.py b/tests/unit/test_engine_dao.py index 1c7f62caa6..1b2a07c2eb 100644 --- a/tests/unit/test_engine_dao.py +++ b/tests/unit/test_engine_dao.py @@ -409,7 +409,7 @@ def test_migration_db_v10(engine_dao): """Verify Downloads after migration from v9 to v10.""" with engine_dao("engine_migration_10.db") as dao: downloads = list(dao.get_downloads()) - assert len(downloads) == 0 + assert not downloads states = list(dao.get_states_from_partial_local(Path())) assert len(states) == 4
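
The series above threads a `finalizing_status` value from the upload's `Linking` action into the Direct Transfer list so the UI can show that a very large upload is still alive (or has hit a recoverable error) while the blob is being linked to its document. The sketch below is a minimal, self-contained approximation of that flow using plain-Python stand-ins; `FakeLinkingAction` and `FakeTransferModel` are hypothetical names and deliberately omit the real Qt signal/model machinery from nxdrive.

# Illustrative sketch only: simplified stand-ins, not the nxdrive implementation.
from typing import Any, Dict


class FakeLinkingAction:
    """Mimics an action that carries a finalizing_status in its export."""

    def __init__(self, progress: float = 0.0) -> None:
        self.type = "Linking"
        self.progress = progress
        self.finalizing_status = ""  # set to "Error" when linking fails

    def export(self) -> Dict[str, Any]:
        # Mirrors the shape of the exported action dict used by the UI layer.
        return {
            "action_type": self.type,
            "progress": self.progress,
            "finalizing_status": self.finalizing_status,
        }


class FakeTransferModel:
    """Mimics the model side: surfaces the status only while finalizing."""

    def set_progress(self, action: Dict[str, Any]) -> None:
        if action["action_type"] == "Linking":
            finalizing = True
            status = action["finalizing_status"]
            print(f"finalizing={finalizing}, finalizing_status={status!r}")


if __name__ == "__main__":
    action = FakeLinkingAction(progress=100.0)
    action.finalizing_status = "Error"  # e.g. after a failed linking attempt
    FakeTransferModel().set_progress(action.export())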