More efficient .dat file reading
This commit is contained in:
parent
2eea373e79
commit
a59bcccde7
2 changed files with 16 additions and 21 deletions
|
|
@ -16,14 +16,12 @@ class ReadSwash:
|
|||
|
||||
@classmethod
def read_nohead(cls, path):
    """Read a headerless .dat file of whitespace-separated floats.

    Parameters
    ----------
    path : pathlib.Path
        File containing float values separated by arbitrary runs of
        whitespace (spaces and/or newlines).
        # assumes `path` is a pathlib.Path (sibling code calls path.open()) — TODO confirm

    Returns
    -------
    numpy.ndarray
        1-D float array with one element per value in the file, in
        file order.
    """
    log.info(f"Loading '{path}'")
    # str.split() with no argument splits on any whitespace run and
    # drops empty strings — the same normalization the previous
    # `sed 's/\s\+/\n/g;/^$/d'` subprocess performed, but in-process
    # and portable (no external `sed` dependency, no temp file).
    a = np.asarray(path.read_text().split(), dtype=float)
    log.debug(f"path={a}")
    return a
|
||||
data = []
|
||||
with path.open() as inp:
|
||||
for line in inp:
|
||||
data += line.split()
|
||||
|
||||
return np.asarray(data, dtype=float)
|
||||
|
||||
def read_time(self, path):
    """Load time stamps from *path* and cache them on the instance.

    Reads the headerless file via :meth:`read_nohead`, then stores the
    sorted unique values in ``self._t`` (duplicates collapse because the
    file repeats the stamp once per output quantity).
    """
    stamps = self.read_nohead(path)
    self._t = np.unique(stamps)
|
||||
|
|
|
|||
Reference in a new issue