Escape while loops for corrupted files
jmcvey3 committed Jan 9, 2025
commit efcc5fb (1 parent: e9dd268)
Showing 2 changed files with 12 additions and 0 deletions.
mhkit/dolfyn/io/base.py (3 additions, 0 deletions)
@@ -83,6 +83,9 @@ def _handle_nan(data):
     Finds trailing NaNs that cause issues in running the rotation
     algorithms and deletes them.
     """
+    if "time" not in data["coords"]:
+        raise Exception("No data recorded in file.")
+
     nan = np.zeros(data["coords"]["time"].shape, dtype=bool)
     l = data["coords"]["time"].size

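The added guard turns a confusing failure into an explicit one: a corrupted file that yields no records produces a dataset whose coords dict has no "time" entry, and the code below the guard would otherwise die with a bare KeyError. A minimal sketch of the case the check catches (the shape of data is inferred from this hunk; everything else is illustrative):

data = {"coords": {}, "data_vars": {}}  # what a record-less file yields

if "time" not in data["coords"]:
    # Fail loudly and early with a readable message.
    raise Exception("No data recorded in file.")

# Without the guard, the next statement in _handle_nan would instead
# raise an opaque KeyError:
#     nan = np.zeros(data["coords"]["time"].shape, dtype=bool)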
mhkit/dolfyn/io/nortek.py (9 additions, 0 deletions)
@@ -262,6 +262,7 @@ def __init__(
             self.config["coord_sys_axes"]
         ]
         da["has_imu"] = 0  # Initialize attribute
+        self._eof = self.pos
         if self.debug:
             logging.info("Init completed")

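The single added line records the reader's byte offset once __init__ finishes. Its exact use is not visible in this diff, but the pattern is straightforward; a generic sketch, assuming self.pos wraps the underlying file's tell() (the class below is illustrative, not dolfyn's):

class BinaryReader:
    def __init__(self, fname):
        self.f = open(fname, "rb")
        # ... header/config parsing would happen here ...
        # Remember where initialization left off so later scans over
        # possibly corrupted blocks have a known reference position.
        self._eof = self.pos

    @property
    def pos(self):
        # Current byte offset in the underlying file.
        return self.f.tell()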
@@ -384,13 +385,17 @@ def findnext(self, do_cs=True):
         if self.endian == "<":
             func = np.uint8
             func2 = lib._bitshift8
+        searching = False
         while True:
             val = unpack(self.endian + "H", self.read(2))[0]
             if np.array(val).astype(func) == 165 and (not do_cs or cs == sum):
                 self.f.seek(-2, 1)
                 return hex(func2(val))
             sum += cs
             cs = val
+            if self.debug and not searching:
+                logging.debug("Scanning every 2 bytes for next datablock...")
+                searching = True

     def read_id(self):
         """Read the next 'ID' from the file."""
@@ -456,13 +461,17 @@ def findnextid(self, id):
             id = int(id, 0)
         nowid = None
         while nowid != id:
+            pos = self.pos
             nowid = self.read_id()
             if nowid == 16:
                 shift = 22
             else:
                 sz = 2 * unpack(self.endian + "H", self.read(2))[0]
                 shift = sz - 4
             self.f.seek(shift, 1)
+            # Escape if a corrupted block leaves the file position unchanged
+            if self.pos == pos:
+                self.f.seek(2, 1)
         return self.pos

     def code_spacing(self, searchcode, iternum=50):
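This hunk is the fix the commit title describes. findnextid skips from block to block by seeking shift bytes, where shift comes from a size field read out of the file; in a corrupted file that arithmetic can leave self.pos exactly where it started, so the while loop never terminates. Snapshotting the position at the top of each iteration and forcing a 2-byte step whenever it did not move guarantees forward progress. A self-contained sketch of the pattern (the block layout here, one sync byte, one id byte, and a uint16 size in 2-byte words, is a simplification, not the Nortek spec):

import io
import struct

def findnextid(f, want, endian="<"):
    # Advance through blocks until the block id `want` is found.
    while True:
        pos = f.tell()
        header = f.read(4)
        if len(header) < 4:
            raise EOFError("id %#x not found" % want)
        _sync, nowid, sz = struct.unpack(endian + "BBH", header)
        if nowid == want:
            return pos
        # Skip the block body. A corrupted size field can make this
        # shift land right back at `pos`, which used to hang the loop.
        f.seek(2 * sz - 4, 1)
        if f.tell() == pos:
            f.seek(2, 1)  # force forward progress past the bad block

# A file whose only block has a corrupted (zero) size field: the old
# logic would spin forever, this version steps past it and hits EOF.
stream = io.BytesIO(b"\xa5\x01\x00\x00")
try:
    findnextid(stream, 0x10)
except EOFError as err:
    print(err)  # id 0x10 not found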