Skip to content

Commit fd8804d

Browse files
committed
rebuild and retest
1 parent 9c9a9ca commit fd8804d

34 files changed

+2111
-1258
lines changed

build/lib/data_algebra/BigQuery.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -208,9 +208,7 @@ def example_handle():
208208
assert _have_bigquery
209209
credential_file = "/Users/johnmount/big_query/big_query_jm.json"
210210
# assert os.path.isfile(credential_file)
211-
os.environ[
212-
"GOOGLE_APPLICATION_CREDENTIALS"
213-
] = credential_file
211+
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = credential_file
214212
# os.environ["GOOGLE_APPLICATION_CREDENTIALS"] # trigger key error if not present
215213
try:
216214
data_catalog = "data-algebra-test"

build/lib/data_algebra/PostgreSQL.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020

2121
class PostgreSQLModel(data_algebra.db_model.DBModel):
2222
"""A model of how SQL should be generated for PostgreSQL.
23-
Assuming we are using a sqlalhemy engine as our connection
23+
Assuming we are using a sqlalchemy engine as our connection
2424
"""
2525

2626
def __init__(self):

build/lib/data_algebra/SQLite.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -58,9 +58,9 @@ def __init__(self):
5858
identifier_quote='"',
5959
string_quote="'",
6060
sql_formatters=SQLite_formatters,
61-
on_joiner='AND',
62-
union_all_term_start='',
63-
union_all_term_end='',
61+
on_joiner="AND",
62+
union_all_term_start="",
63+
union_all_term_end="",
6464
)
6565

6666
def prepare_connection(self, conn):

build/lib/data_algebra/SparkSQL.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -70,9 +70,9 @@ def __init__(self, *, spark_context, spark_session):
7070
self.spark_session = spark_session
7171

7272
def close(self):
73-
if self.spark_conext is not None:
74-
self.spark_conext.stop() # probably only for local demos
75-
self.spark_conext = None
73+
if self.spark_context is not None:
74+
self.spark_context.stop() # probably only for local demos
75+
self.spark_context = None
7676
if self.spark_session is not None:
7777
self.spark_session = None
7878

build/lib/data_algebra/data_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ def _type_safe_is_in(a, b):
8686
type_a = data_algebra.util.guess_carried_scalar_type(a)
8787
type_b = {data_algebra.util.map_type_to_canonical(type(v)) for v in b}
8888
if len(type_b) > 1:
89-
raise TypeError(f'multiple types in set: {type_b}')
89+
raise TypeError(f"multiple types in set: {type_b}")
9090
type_b = list(type_b)[0]
9191
if not data_algebra.util.compatible_types([type_a, type_b]):
9292
raise TypeError(f"can't check for an {type_a} in a set of {type_b}'s")

0 commit comments

Comments (0)