Commit 3f0d08d

black source code
1 parent c136f59 commit 3f0d08d

16 files changed: +289 -120 lines changed

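This commit applies the black code formatter to the package source. Most hunks below fall into two groups: whitespace-only cleanups (docstring re-indentation and trailing-whitespace removal, where the removed and added lines can look identical once leading spaces are lost in this view), and black's "magic trailing comma" rule, which splits any call, signature, or literal that ends in a trailing comma onto one element per line. Below is a minimal sketch of the trailing-comma behavior using black's Python API; the black version and configuration used for this commit are not stated, and make_table / incoming_columns are hypothetical names, not code from the package.

# Sketch only: the "magic trailing comma" rule seen in most hunks of this commit.
# Requires the black package to be installed; the names below are hypothetical.
import black

before = """result = make_table(
    table_name=None, column_names=incoming_columns,
)
"""

after = black.format_str(before, mode=black.Mode())
print(after)
# With recent black releases the trailing comma forces one argument per line:
# result = make_table(
#     table_name=None,
#     column_names=incoming_columns,
# )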

data_algebra/BigQuery.py

Lines changed: 1 addition & 1 deletion

@@ -107,7 +107,7 @@ def _bigquery_ieee_divide_expr(dbmodel, expression):
 
 class BigQueryModel(data_algebra.db_model.DBModel):
     """A model of how SQL should be generated for BigQuery
-    connection should be google.cloud.bigquery.client.Client"""
+    connection should be google.cloud.bigquery.client.Client"""
 
     def __init__(self, *, table_prefix: Optional[str] = None):
         data_algebra.db_model.DBModel.__init__(

data_algebra/MySQL.py

Lines changed: 1 addition & 1 deletion

@@ -56,7 +56,7 @@ def _MySQL_concat_expr(dbmodel, expression):
 
 class MySQLModel(data_algebra.db_model.DBModel):
     """A model of how SQL should be generated for MySQL.
-    Assuming we are using a sqlalchemy engine as our connection.
+    Assuming we are using a sqlalchemy engine as our connection.
     """
 
     def __init__(self):

data_algebra/PostgreSQL.py

Lines changed: 1 addition & 1 deletion

@@ -41,7 +41,7 @@ def _postgresql_null_divide_expr(dbmodel, expression):
 
 class PostgreSQLModel(data_algebra.db_model.DBModel):
     """A model of how SQL should be generated for PostgreSQL.
-    Assuming we are using a sqlalchemy engine as our connection
+    Assuming we are using a sqlalchemy engine as our connection
     """
 
     def __init__(self):

data_algebra/SQLite.py

Lines changed: 2 additions & 1 deletion

@@ -452,7 +452,8 @@ def _emit_full_join_as_complex(
         # get shared key set
         left_descr.project({}, group_by=join_columns)
         .concat_rows(
-            b=right_descr.project({}, group_by=join_columns), id_column=None,
+            b=right_descr.project({}, group_by=join_columns),
+            id_column=None,
         )
         .project({}, group_by=join_columns)
         # simulate full join with left joins

data_algebra/arrow.py

Lines changed: 15 additions & 10 deletions

@@ -7,9 +7,9 @@
 
 class Arrow(abc.ABC):
     """
-    Arrow from category theory: see Steve Awody,
+    Arrow from category theory: see Steve Awody,
     "Category Theory, 2nd Edition", Oxford Univ. Press, 2010 pg. 4.
-    Essentially this is a tool to expose associativity, without forcing
+    Essentially this is a tool to expose associativity, without forcing
     things to be a function to expose this effect.
     """
 

@@ -26,16 +26,16 @@ def cod(self):
 
     @abc.abstractmethod
     def apply_to(self, b):
-        """ apply_to b, compose arrows (right to left) """
+        """apply_to b, compose arrows (right to left)"""
 
     # noinspection PyPep8Naming
     @abc.abstractmethod
     def act_on(self, X):
-        """ act on X, must associate with composition """
+        """act on X, must associate with composition"""
 
     # noinspection PyPep8Naming
     def transform(self, X):
-        """ transform X, may or may not associate with composition """
+        """transform X, may or may not associate with composition"""
         return self.act_on(X)
 
     def __rshift__(self, other):  # override self >> other

@@ -177,25 +177,29 @@ def fit_transform(self, X, y=None):
     def dom(self):
         return DataOpArrow(
             data_algebra.data_ops.TableDescription(
-                table_name=None, column_names=self.incoming_columns,
+                table_name=None,
+                column_names=self.incoming_columns,
             )
         )
 
     def dom_as_table(self):
         return data_algebra.data_ops.TableDescription(
-            table_name=None, column_names=self.incoming_columns,
+            table_name=None,
+            column_names=self.incoming_columns,
         )
 
     def cod(self):
         return DataOpArrow(
             data_algebra.data_ops.TableDescription(
-                table_name=None, column_names=self.outgoing_columns,
+                table_name=None,
+                column_names=self.outgoing_columns,
             )
         )
 
     def cod_as_table(self):
         return data_algebra.data_ops.TableDescription(
-            table_name=None, column_names=self.outgoing_columns,
+            table_name=None,
+            column_names=self.outgoing_columns,
         )
 
     def __repr__(self):

@@ -234,7 +238,8 @@ def format_end_description(
 
     def __str__(self):
         in_rep = self.format_end_description(
-            required_cols=self.incoming_columns, forbidden_cols=self.disallowed_columns,
+            required_cols=self.incoming_columns,
+            forbidden_cols=self.disallowed_columns,
         )
         out_rep = self.format_end_description(
             required_cols=self.outgoing_columns,
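
The arrow.py docstring hunks are black's docstring normalization: the padding spaces just inside the triple quotes of a one-line docstring are removed, and multi-line docstrings have trailing whitespace stripped. A small before/after sketch of the same rewrite follows (hypothetical function names, not code from this commit).

# Before black: padding spaces inside the quotes of a one-line docstring.
def act_on_padded(X):
    """ act on X, must associate with composition """
    return X


# After black: the padding just inside the quotes is dropped.
def act_on_normalized(X):
    """act on X, must associate with composition"""
    return X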

data_algebra/cdata.py

Lines changed: 8 additions & 2 deletions

@@ -731,7 +731,10 @@ def pivot_specification(
     record_map = RecordMap(
         blocks_in=RecordSpecification(
             control_table=data_algebra.pandas_model.pd.DataFrame(
-                {col_name_key: value_cols, col_value_key: value_cols,}
+                {
+                    col_name_key: value_cols,
+                    col_value_key: value_cols,
+                }
             ),
             record_keys=row_keys,
             control_table_keys=[col_name_key],

@@ -768,7 +771,10 @@ def unpivot_specification(
     record_map = RecordMap(
         blocks_out=RecordSpecification(
             control_table=data_algebra.pandas_model.pd.DataFrame(
-                {col_name_key: value_cols, col_value_key: value_cols,}
+                {
+                    col_name_key: value_cols,
+                    col_value_key: value_cols,
+                }
             ),
             record_keys=row_keys,
             control_table_keys=[col_name_key],

data_algebra/data_ops.py

Lines changed: 45 additions & 24 deletions

@@ -88,7 +88,7 @@ def _work_col_group_arg(arg, *, arg_name: str, columns: Iterable[str]):
 
 class ViewRepresentation(OperatorPlatform, abc.ABC):
     """Structure to represent the columns of a query or a table.
-    Abstract base class."""
+    Abstract base class."""
 
     column_names: Tuple[str, ...]
     sources: Tuple[

@@ -366,7 +366,12 @@ def to_near_sql_implementation_(
         :return: data_algebra.near_sql.NearSQL
         """
 
-    def to_sql(self, db_model=None, *, sql_format_options=None,) -> str:
+    def to_sql(
+        self,
+        db_model=None,
+        *,
+        sql_format_options=None,
+    ) -> str:
         """
         Convert operator dag to SQL.
 

@@ -376,8 +381,12 @@ def to_sql(self, db_model=None, *, sql_format_options=None,) -> str:
         """
         if db_model is None:
             import data_algebra.SQLite  # import late to avoid circular import issue
+
             db_model = data_algebra.SQLite.SQLiteModel()
-        return db_model.to_sql(ops=self, sql_format_options=sql_format_options,)
+        return db_model.to_sql(
+            ops=self,
+            sql_format_options=sql_format_options,
+        )
 
     # Pandas realization
 

@@ -418,14 +427,14 @@ def eval(
         check_incoming_data_constraints: bool = False,
     ):
         """
-        Evaluate operators with respect to Pandas data frames.
+        Evaluate operators with respect to Pandas data frames.
 
-        :param data_map: map from table names to data frames
-        :param data_model: adaptor to data dialect (Pandas for now)
-        :param narrow: logical, if True don't copy unexpected columns
-        :param check_incoming_data_constraints: logical, if True check incoming data meets constraints
-        :return: table result
-        """
+        :param data_map: map from table names to data frames
+        :param data_model: adaptor to data dialect (Pandas for now)
+        :param narrow: logical, if True don't copy unexpected columns
+        :param check_incoming_data_constraints: logical, if True check incoming data meets constraints
+        :return: table result
+        """
 
         if data_map is not None:
             assert isinstance(data_map, dict)

@@ -652,13 +661,13 @@ def project(self, ops=None, *, group_by=None) -> "ViewRepresentation":
         return self.project_parsed_(parsed_ops=parsed_ops, group_by=group_by)
 
     def natural_join(
-        self,
-        b,
-        *,
-        on: Optional[Iterable[str]] = None,
+        self,
+        b,
+        *,
+        on: Optional[Iterable[str]] = None,
         jointype: str,
         check_all_common_keys_in_equi_spec: bool = False,
-        by: Optional[Iterable[str]] = None,
+        by: Optional[Iterable[str]] = None,
         check_all_common_keys_in_by: bool = False,
     ) -> "ViewRepresentation":
         """

@@ -686,7 +695,9 @@ def natural_join(
         assert isinstance(jointype, str)
         assert isinstance(check_all_common_keys_in_equi_spec, bool)
         assert isinstance(check_all_common_keys_in_by, bool)
-        check_all_common_keys_in_equi_spec = check_all_common_keys_in_equi_spec or check_all_common_keys_in_by
+        check_all_common_keys_in_equi_spec = (
+            check_all_common_keys_in_equi_spec or check_all_common_keys_in_by
+        )
         if self.is_trivial_when_intermediate_():
             return self.sources[0].natural_join(b, on=on, jointype=jointype)
         return NaturalJoinNode(

@@ -807,7 +818,7 @@ def select_columns(self, columns) -> "ViewRepresentation":
         if isinstance(self, DropColumnsNode):
             return self.sources[0].select_columns(columns)
         return SelectColumnsNode(source=self, columns=columns)
-
+
     def map_columns(self, column_remapping) -> "ViewRepresentation":
         """
         Map column names or rename.

@@ -877,12 +888,12 @@ def convert_records(self, record_map) -> "ViewRepresentation":
 
 class TableDescription(ViewRepresentation):
     """
-    Describe columns, and qualifiers, of a table.
+    Describe columns, and qualifiers, of a table.
 
-    Example:
-        from data_algebra.data_ops import *
-        d = TableDescription(table_name='d', column_names=['x', 'y'])
-        print(d)
+    Example:
+        from data_algebra.data_ops import *
+        d = TableDescription(table_name='d', column_names=['x', 'y'])
+        print(d)
     """
 
     table_name: str

@@ -2419,7 +2430,15 @@ class NaturalJoinNode(ViewRepresentation):
     on: List[str]
     jointype: str
 
-    def __init__(self, a, b, *, on: Optional[Iterable[str]], jointype: str, check_all_common_keys_in_equi_spec: bool = False):
+    def __init__(
+        self,
+        a,
+        b,
+        *,
+        on: Optional[Iterable[str]],
+        jointype: str,
+        check_all_common_keys_in_equi_spec: bool = False,
+    ):
         # check set of tables is consistent in both sub-dags
         a_tables = a.get_tables()
         b_tables = b.get_tables()

@@ -2850,7 +2869,9 @@ def __init__(
         self.sql = sql.copy()
         self.view_name = view_name
         ViewRepresentation.__init__(
-            self, column_names=column_names, node_name="SQLNode",
+            self,
+            column_names=column_names,
+            node_name="SQLNode",
        )
 
     def apply_to(self, a, *, target_table_key=None):
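
None of the data_ops.py hunks change behavior or call signatures; for example, to_sql() still defaults to the SQLite model when db_model is None, as the hunk above shows. A short usage sketch follows (it assumes the data_algebra package is installed and mirrors the example in the TableDescription docstring above).

# Usage sketch: the reformatted to_sql() keeps its prior behavior, defaulting to
# data_algebra.SQLite.SQLiteModel() when db_model is None.
from data_algebra.data_ops import TableDescription

d = TableDescription(table_name="d", column_names=["x", "y"])
print(d)           # the example from the TableDescription docstring
print(d.to_sql())  # SQL generated with the default SQLite model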

data_algebra/data_ops_types.py

Lines changed: 5 additions & 5 deletions

@@ -250,13 +250,13 @@ def project(self, ops=None, *, group_by=None):
 
     @abc.abstractmethod
     def natural_join(
-        self,
-        b,
-        *,
-        on: Optional[Iterable[str]] = None,
+        self,
+        b,
+        *,
+        on: Optional[Iterable[str]] = None,
         jointype: str,
         check_all_common_keys_in_equi_spec: bool = False,
-        by: Optional[Iterable[str]] = None,
+        by: Optional[Iterable[str]] = None,
         check_all_common_keys_in_by: bool = False
     ):
         """
