Skip to content

Commit b6db7ce

Browse files
committed
Now supporting writing data to Postgres DB, created requirements.txt file, filled in README.md, updated .gitignore
1 parent 5dd392d commit b6db7ce

File tree

8 files changed

+308
-10
lines changed

8 files changed

+308
-10
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
11
.DS_Store
2+
__pycache__
23
*.csv

README.md

Lines changed: 119 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,120 @@
1-
Under development. README to be populated later.
1+
This library makes use of [pynmea2](https://github.com/Knio/pynmea2) to parse through input NMEA data, organize it, and output it to CSV files or to a PostgreSQL database.
22

3-
Reference materials:
4-
https://www.trimble.com/OEM_ReceiverHelp/V4.44/en/NMEA-0183messages_MessageOverview.html
3+
## Setup
4+
5+
Your input file should have a format similar to those under `test_data`. To have your data datetime stamped, it must be in a format like that of `test_data/test_data_all.nmea`, with RMC sentences containing date and time stamps preceding the other sentences in the same cycle.
6+
7+
If working with a database, the database access information/credentials must be set up in `db_creds.py`.
8+
9+
10+
## Usage
11+
```
12+
$ cd ~/Downloads/nmea_parser/
13+
$ pip install -r requirements.txt
14+
...
15+
$ python nmea_parser.py --help
16+
usage: nmea_parser.py [-h] [--drop_previous_db_tables] filepath {csv,db,both}
17+
18+
positional arguments:
19+
filepath file system path to file containing NMEA data
20+
{csv,db,both} where to output data: CSV files, database, or both
21+
22+
optional arguments:
23+
-h, --help show this help message and exit
24+
--drop_previous_db_tables
25+
drop previous DB tables before importing new data;
26+
only applies when output_method is 'db' or 'both'
27+
```
28+
## Examples
29+
### Example 1
30+
```
31+
$ ls -l *.csv
32+
ls: *.csv: No such file or directory
33+
$ python nmea_parser.py test_data/test_data_all.nmea csv
34+
35+
Reading in data... done.
36+
37+
Processing data... done.
38+
39+
Writing data to CSVs... data from logfile 'test_data/test_data_all.nmea' written to:
40+
test_data_all_GNRMC.csv
41+
test_data_all_GNVTG.csv
42+
test_data_all_GNGGA.csv
43+
test_data_all_GNGSA.csv
44+
test_data_all_GPGSV.csv
45+
test_data_all_GLGSV.csv
46+
test_data_all_GNGLL.csv
47+
done.
48+
49+
All done. Exiting.
50+
51+
52+
$ ls -l *.csv
53+
-rw-r--r-- 1 Thomas staff 14310 Dec 30 18:19 test_data_all_GLGSV.csv
54+
-rw-r--r-- 1 Thomas staff 9502 Dec 30 18:19 test_data_all_GNGGA.csv
55+
-rw-r--r-- 1 Thomas staff 6852 Dec 30 18:19 test_data_all_GNGLL.csv
56+
-rw-r--r-- 1 Thomas staff 18472 Dec 30 18:19 test_data_all_GNGSA.csv
57+
-rw-r--r-- 1 Thomas staff 8672 Dec 30 18:19 test_data_all_GNRMC.csv
58+
-rw-r--r-- 1 Thomas staff 5779 Dec 30 18:19 test_data_all_GNVTG.csv
59+
-rw-r--r-- 1 Thomas staff 40263 Dec 30 18:19 test_data_all_GPGSV.csv
60+
```
61+
62+
### Example 2
63+
```
64+
$ python nmea_parser.py test_data/test_data_all.nmea db
65+
66+
Reading in data... done.
67+
68+
Processing data... done.
69+
70+
Writing data to database... data from logfile 'test_data/test_data_all.nmea' written to:
71+
'nmea_gn_rmc' table in 'nmea_data' database
72+
'nmea_gn_vtg' table in 'nmea_data' database
73+
'nmea_gn_gga' table in 'nmea_data' database
74+
'nmea_gn_gsa' table in 'nmea_data' database
75+
'nmea_gp_gsv' table in 'nmea_data' database
76+
'nmea_gl_gsv' table in 'nmea_data' database
77+
'nmea_gn_gll' table in 'nmea_data' database
78+
done.
79+
80+
All done. Exiting.
81+
```
82+
83+
### Example 3
84+
```
85+
$ python nmea_parser.py test_data/test_data_all.nmea both
86+
87+
Reading in data... done.
88+
89+
Processing data... done.
90+
91+
Writing data to CSVs... data from logfile 'test_data/test_data_all.nmea' written to:
92+
test_data_all_GNRMC.csv
93+
test_data_all_GNVTG.csv
94+
test_data_all_GNGGA.csv
95+
test_data_all_GNGSA.csv
96+
test_data_all_GPGSV.csv
97+
test_data_all_GLGSV.csv
98+
test_data_all_GNGLL.csv
99+
done.
100+
101+
Writing data to database... data from logfile 'test_data/test_data_all.nmea' written to:
102+
'nmea_gn_rmc' table in 'nmea_data' database
103+
'nmea_gn_vtg' table in 'nmea_data' database
104+
'nmea_gn_gga' table in 'nmea_data' database
105+
'nmea_gn_gsa' table in 'nmea_data' database
106+
'nmea_gp_gsv' table in 'nmea_data' database
107+
'nmea_gl_gsv' table in 'nmea_data' database
108+
'nmea_gn_gll' table in 'nmea_data' database
109+
done.
110+
111+
All done. Exiting.
112+
```
113+
114+
115+
## References Used in Development
116+
https://github.com/Knio/pynmea2/blob/master/README.md
117+
118+
https://www.trimble.com/OEM_ReceiverHelp/V4.44/en/NMEA-0183messages_MessageOverview.html
119+
120+
https://www.u-blox.com/sites/default/files/products/documents/u-blox8-M8_ReceiverDescrProtSpec_%28UBX-13003221%29.pdf (section 31 'NMEA Protocol')

db_creds.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# PostgreSQL connection settings consumed by db_data_import.py and db_utils.py.
# NOTE(review): credentials are hard-coded in source control — move them to
# environment variables or a secrets store before using beyond local development.
DB_USER = "postgres"
DB_PASSWORD = "postgres"
DB_HOST = "localhost"
DB_PORT = "5432"
DB_NAME = "nmea_data"

db_data_import.py

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
IF_EXISTS_OPT = 'append' # How DataFrame.to_sql treats a pre-existing table: 'fail', 'replace', or 'append' — see https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_sql.html
2+
3+
4+
import os
5+
import sqlalchemy #import create_engine
6+
7+
# Local modules/library files:
8+
import db_creds
9+
10+
11+
def send_data_to_db(log_file_path, dfs, table_name_base, table_name_suffixes=None):
    """Write each DataFrame in `dfs` to its own table in the configured PostgreSQL DB.

    Args:
        log_file_path: path of the NMEA log file the data came from. Currently
            unused here; kept for interface stability and for the planned
            log-file ID table (see TODO below).
        dfs: iterable of pandas DataFrames to import.
        table_name_base: common prefix for every table name.
        table_name_suffixes: optional list parallel to `dfs`; when given, table
            i is named '<table_name_base>_<table_name_suffixes[i]>'.

    Returns:
        List of table names the data was written to, in input order.
    """
    db_access_str = (f'postgresql://{db_creds.DB_USER}:{db_creds.DB_PASSWORD}'
                     f'@{db_creds.DB_HOST}:{db_creds.DB_PORT}/{db_creds.DB_NAME}')
    engine = sqlalchemy.create_engine(db_access_str)

    table_names = []

    # Put data in database, one table per DataFrame
    for df_idx, df in enumerate(dfs):

        table_name = table_name_base
        if table_name_suffixes:
            table_name = f'{table_name}_{table_name_suffixes[df_idx]}'

        # method='multi' batches rows into multi-value INSERT statements;
        # IF_EXISTS_OPT (module constant) controls behavior when the table exists.
        df.to_sql(table_name, engine, method='multi', if_exists=IF_EXISTS_OPT)

        table_names.append(table_name)

    return table_names
34+
35+
# TODO: Create separate table for log file IDs and names. Check what the current largest ID is, then append a column to
36+
# the dfs with that ID + 1, and a row to the log file table with that ID and the log file name, or something like that

db_table_lists.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# Tables this project writes to in the database, one per talker/sentence-type
# combination, named 'nmea_<talker>_<sentence type>' in lowercase.
# nmea_parser.py passes this list to db_utils.drop_db_tables() when the user
# asks for previous tables to be dropped before a fresh import.
_TALKER_SENTENCE_COMBOS = (
    'gl_gsv',
    'gn_gga',
    'gn_gll',
    'gn_gsa',
    'gn_rmc',
    'gn_vtg',
    'gp_gsv',
)

nmea_tables = ['nmea_' + combo for combo in _TALKER_SENTENCE_COMBOS]

db_utils.py

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
import psycopg2
2+
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
3+
import functools
4+
print = functools.partial(print, flush=True) # Module-wide shadow of print: flush immediately so progress messages appear as work happens instead of buffering until exit
5+
6+
# Local modules/library files:
7+
import db_creds
8+
import db_table_lists
9+
10+
11+
def drop_db_tables(tables_to_drop, verbose=False):
    """Drop each named table (and any dependent objects) from the database.

    Args:
        tables_to_drop: iterable of table names to drop.
        verbose: when True, print one line per table as it is dropped.
    """
    psqlCon, psqlCursor = setup_db_connection()

    try:
        # Drop tables one at a time. (A previous revision also accumulated a
        # comma-separated name list that was never used; removed as dead code.)
        for tableName in tables_to_drop:
            if verbose:
                print(f"Dropping database table {tableName} (and any dependent objects) if it exists.")

            # Quotes around table names are required for case sensitivity;
            # CASCADE also removes objects (views, constraints) that depend on the table.
            dropTableStmt = f"DROP TABLE IF EXISTS \"{tableName}\" CASCADE;"
            psqlCursor.execute(dropTableStmt)
    finally:
        # Release the connection even if a DROP fails.
        free_db_connection(psqlCon, psqlCursor)
28+
29+
30+
def create_table(table_name, columns=None):
    """Create a database table if it does not already exist.

    Args:
        table_name: name of the table; double-quoted in the SQL for case
            sensitivity.
        columns: optional list of dicts, each with 'name' and 'datatype' keys.
            When omitted or empty, a zero-column table is created (PostgreSQL
            permits this).
    """
    column_defs = ''
    if columns:
        # Quote each column name for case sensitivity; joining with ', ' avoids
        # the manual "don't append a comma after the last column" bookkeeping.
        column_defs = ', '.join(f'"{column["name"]}" {column["datatype"]}'
                                for column in columns)

    db_command = f'CREATE TABLE IF NOT EXISTS "{table_name}" ({column_defs})'

    run_db_command(db_command)
45+
46+
47+
def run_db_command(db_command):
    """Open a DB connection, execute a single SQL command, and always clean up.

    Autocommit is enabled by setup_db_connection(), so the command takes effect
    without an explicit commit.

    Args:
        db_command: SQL statement to execute.
    """
    psqlCon, psqlCursor = setup_db_connection()

    try:
        # Run command on database
        psqlCursor.execute(db_command)
    finally:
        # Free the resources even if the command raises.
        free_db_connection(psqlCon, psqlCursor)
58+
59+
60+
def setup_db_connection():
    """Open a psycopg2 connection and cursor to the database from db_creds.

    Returns:
        [connection, cursor] — autocommit is enabled on the connection, so
        DDL statements take effect immediately without an explicit commit.
    """
    db_access_str = (f'postgresql://{db_creds.DB_USER}:{db_creds.DB_PASSWORD}'
                     f'@{db_creds.DB_HOST}:{db_creds.DB_PORT}/{db_creds.DB_NAME}')

    # Start a PostgreSQL database session with autocommit enabled
    psqlCon = psycopg2.connect(db_access_str)
    psqlCon.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)

    # Open a database cursor
    psqlCursor = psqlCon.cursor()

    return [psqlCon, psqlCursor]
72+
73+
74+
def free_db_connection(psqlCon, psqlCursor):
    """Release database resources: close the cursor, then its connection."""
    psqlCursor.close()
    psqlCon.close()

nmea_parser.py

Lines changed: 49 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -7,12 +7,27 @@
77
from collections import namedtuple
88
import re
99
import functools
10-
print = functools.partial(print, flush=True)
10+
print = functools.partial(print, flush=True) # Prevent print statements from buffering till end of execution
11+
12+
# Local modules/library files:
13+
import db_data_import
14+
import db_creds
15+
import db_utils
16+
import db_table_lists
17+
1118

1219
def parse_and_validate_args():
1320

1421
parser = argparse.ArgumentParser()
15-
parser.add_argument("filepath", help="File system path to file containing NMEA data")
22+
parser.add_argument("filepath",
23+
help="file system path to file containing NMEA data")
24+
parser.add_argument("output_method",
25+
choices=['csv', 'db', 'both'],
26+
help="where to output data: CSV files, database, or both")
27+
parser.add_argument("--drop_previous_db_tables",
28+
action="store_true",
29+
help="drop previous DB tables before importing new data; only applies when output_method is 'db' or 'both'")
30+
1631
args = parser.parse_args()
1732

1833
if os.path.isfile(args.filepath):
@@ -203,9 +218,24 @@ def dfs_to_csv(sentence_dfs, input_file_path, verbose=False):
203218

204219
if verbose:
205220
if df_idx is 0: # If this is the first df
206-
print("data written to:")
221+
print(f"data from logfile '{input_file_path}' written to:")
207222
print(" " + filename)
208223

224+
225+
def dfs_to_db(sentence_dfs, input_file_path, verbose=False):
    """Write each sentence-type DataFrame to its own database table.

    Table names are 'nmea_<talker>_<sentence type>' in lowercase, with the
    talker and sentence type taken from the first row of each DataFrame.
    """
    base_name = 'nmea'

    # Build one lowercase 'talker_sentencetype' suffix per DataFrame.
    suffixes = []
    for df in sentence_dfs:
        suffixes.append(f"{df['talker'][0]}_{df['sentence_type'][0]}".lower())

    written_tables = db_data_import.send_data_to_db(input_file_path, sentence_dfs, base_name, suffixes)

    if verbose:
        print(f"data from logfile '{input_file_path}' written to:")
        for written_table in written_tables:
            print(f"    '{written_table}' table in '{db_creds.DB_NAME}' database")
237+
238+
209239
def get_sentence_type(sentence):
    """Return the sentence's combined identifier, e.g. 'GN' + 'RMC' -> 'GNRMC'."""
    talker = sentence.talker
    sentence_type = sentence.sentence_type
    return talker + sentence_type
@@ -264,8 +294,9 @@ def expand_GSV_fields(fields):
264294

265295
def main():
266296

267-
print("\nReading in data... ", end="")
268297
args = parse_and_validate_args()
298+
299+
print("\nReading in data... ", end="")
269300
file = open_file(args.filepath)
270301
sentences = read_file(file)
271302
print("done.")
@@ -279,9 +310,20 @@ def main():
279310
sentence_dfs = sentences_to_dataframes(sentence_sets)
280311
print("done.")
281312

282-
print("\nWriting data to CSVs... ", end="")
283-
dfs_to_csv(sentence_dfs, args.filepath, verbose=True)
284-
print("done.")
313+
if (args.output_method == 'csv' or args.output_method == 'both'):
314+
print("\nWriting data to CSVs... ", end="")
315+
dfs_to_csv(sentence_dfs, args.filepath, verbose=True)
316+
print("done.")
317+
318+
if (args.output_method == 'db' or args.output_method == 'both'):
319+
320+
if args.drop_previous_db_tables:
321+
print()
322+
db_utils.drop_db_tables(db_table_lists.nmea_tables, verbose=True)
323+
324+
print("\nWriting data to database... ", end="")
325+
dfs_to_db(sentence_dfs, args.filepath, verbose=True)
326+
print("done.")
285327

286328
print("\nAll done. Exiting.\n\n")
287329

requirements.txt

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
2+
# nmea_parser/db_data_import.py: 5
3+
SQLAlchemy == 1.3.22
4+
5+
# nmea_parser/nmea_parser.py: 6
6+
pandas == 1.2.0
7+
8+
# nmea_parser/nmea_parser.py: 1
9+
pynmea2 == 1.15.0
10+
11+
psycopg2-binary >= 2.8.6

0 commit comments

Comments
 (0)