Skip to content

Commit

Permalink
Merge pull request #1062 from debrief/for_release
Browse files Browse the repository at this point in the history
Update version for new release
  • Loading branch information
IanMayo authored Oct 12, 2021
2 parents d7eba24 + 87e20dd commit f4e95c0
Show file tree
Hide file tree
Showing 7 changed files with 108 additions and 4 deletions.
6 changes: 6 additions & 0 deletions HISTORY.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@
History
=======


0.0.31 (2021-10-12)
-------------------

* Fix Link16 importer to handle unexpected binary content in CSV headers

0.0.30 (2021-10-05)
-------------------

Expand Down
5 changes: 4 additions & 1 deletion importers/link_16_importer.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import os
import re
from datetime import datetime, timedelta
from string import printable

from tqdm import tqdm

Expand Down Expand Up @@ -44,7 +45,9 @@ def can_load_this_filename(self, filename):
def can_load_this_header(self, header):
    """Determine whether *header* looks like a Link16 header row.

    :param header: the first line read from the candidate file
    :return: True if the (sanitised) header matches the V1 or V2 format
    """
    # V1 starts w/ PPLI
    # V2 starts w/ Xmt/Rcv
    # Strip any non-ASCII characters from the header first, so that a
    # header row polluted with stray binary bytes is still recognised.
    # (The unsanitised early `return header.startswith(...)` left over
    # from the old implementation made this code unreachable — removed.)
    ascii_header = "".join(char for char in header if char in printable)
    # str.startswith accepts a tuple of prefixes: one call covers both formats
    return ascii_header.startswith((V1_HEADER, V2_HEADER))

def can_load_this_file(self, file_contents):
    """Accept every file: filtering is done by filename/header checks only."""
    return True
Expand Down
2 changes: 1 addition & 1 deletion pepys_import/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,5 @@

__author__ = "Ian Mayo"
__email__ = "[email protected]"
__version__ = "0.0.30"
__version__ = "0.0.31"
__build_timestamp__ = None
2 changes: 1 addition & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.0.30
current_version = 0.0.31
commit = True
tag = True

Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,6 @@
test_suite="tests",
tests_require=test_requirements,
url="https://github.com/debrief/pepys-import",
version="0.0.30",
version="0.0.31",
zip_safe=False,
)
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
Xmt/Rcv,SlotTime,Set,FrameIndex,STN,RI,Packing,PG,Jx.y,MLI,ADDI,RC,TN,Lat,Long,Alt,Crs,Spd,Id,|,1,2,3,4,5,6,7,8
SomeStr,59:31.6,B,550,2,0,SomeStr,6,J2.2,3,0,0,1832,0.953371727,0.458539269,25124,5,0.972404565,Friend or Foe,|,Some Hash,Some Hash,,,,,,
SomeStr,18:45.2,B,560,669,1,SomeStr,6,J3.3,6,0,0,1580,0.762199436,0.296645626,76355,26,0.832872258,Friend or Foe,|,Some Hash,Some Hash,,,,,,
92 changes: 92 additions & 0 deletions tests/test_load_link16.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,11 @@
"sample_data/track_files/Link16/GEV_no_timestamp.raw-PPLI_201.csv",
)

# Sample V2 file whose header row contains binary bytes — exercises the
# header-sanitising path added in PR #1062
DATA_PATH_BINARY_IN_HEADER = os.path.join(
    FILE_PATH,
    "sample_data/track_files/Link16/V2_GEV_binary_in_header_16-05-2021T00-00-00.raw-SLOTS_JMSG.csv",
)


class TestLoadLink16(unittest.TestCase):
def setUp(self):
Expand Down Expand Up @@ -225,6 +230,20 @@ def test_file_with_datetime_in_middle_of_filename(self):
processor = FileProcessor(archive=False)
processor.register_importer(Link16Importer())

# check states empty
with self.store.session_scope():
# there must be no states at the beginning
states = self.store.session.query(self.store.db_classes.State).all()
assert len(states) == 0

# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0

# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0

# parse the data
processor.process(DATA_PATH_MIDDLE_DATE, self.store, False)

Expand Down Expand Up @@ -266,6 +285,20 @@ def test_file_with_hours(self):
processor = FileProcessor(archive=False)
processor.register_importer(Link16Importer())

# check states empty
with self.store.session_scope():
# there must be no states at the beginning
states = self.store.session.query(self.store.db_classes.State).all()
assert len(states) == 0

# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0

# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0

# parse the data
processor.process(DATA_PATH_HOURS_IN_DATA, self.store, False)

Expand Down Expand Up @@ -331,6 +364,20 @@ def test_non_zero_timestamp(self):
processor = FileProcessor(archive=False)
processor.register_importer(Link16Importer())

# check states empty
with self.store.session_scope():
# there must be no states at the beginning
states = self.store.session.query(self.store.db_classes.State).all()
assert len(states) == 0

# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0

# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0

# parse the data
processor.process(DATA_PATH_TIGHT_ROLLOVER, self.store, False)

Expand Down Expand Up @@ -360,6 +407,51 @@ def test_non_zero_timestamp(self):
assert results[2].time == datetime(2021, 5, 9, 10, 46, 38, 100000)
assert results[3].time == datetime(2021, 5, 9, 11, 38, 18, 0)

def test_binary_in_header(self):
    """A Link16 file whose header row contains binary bytes still imports."""
    processor = FileProcessor(archive=False)
    processor.register_importer(Link16Importer())

    # The database must be completely empty before the import runs
    with self.store.session_scope():
        db = self.store.db_classes
        query = self.store.session.query

        # there must be no states at the beginning
        assert len(query(db.State).all()) == 0

        # there must be no platforms at the beginning
        assert len(query(db.Platform).all()) == 0

        # there must be no datafiles at the beginning
        assert len(query(db.Datafile).all()) == 0

    # parse the data
    processor.process(DATA_PATH_BINARY_IN_HEADER, self.store, False)

    # check data got created
    with self.store.session_scope():
        db = self.store.db_classes
        query = self.store.session.query

        # there must be states after the import
        self.assertEqual(len(query(db.State).all()), 2)

        # there must be platforms after the import
        self.assertEqual(len(query(db.Platform).all()), 2)

        # there must be one datafile afterwards
        self.assertEqual(len(query(db.Datafile).all()), 1)

        # Heading changed to control ordering
        ordered = query(db.State).order_by(db.State.heading).all()
        assert len(ordered) == 2
        assert ordered[0].time == datetime(2021, 5, 16, 0, 59, 31, 600000)
        assert ordered[1].time == datetime(2021, 5, 16, 1, 18, 45, 200000)


# Allow running this test module directly: python tests/test_load_link16.py
if __name__ == "__main__":
    unittest.main()

0 comments on commit f4e95c0

Please sign in to comment.