Skip to content

Commit 72d3247

Browse files
chore: Remove upper bound for protobuf (feast-dev#4974)
* Remove upper bound for `protobuf`

* Recompile python lock files

* Lint files

---------

Signed-off-by: franco-bocci <[email protected]>
Signed-off-by: Erin Boehmer <[email protected]>
Co-authored-by: Franco Bocci <[email protected]>
Co-authored-by: Erin Boehmer <[email protected]>
1 parent 1267703 commit 72d3247

File tree

32 files changed

+455
-442
lines changed

32 files changed

+455
-442
lines changed

sdk/python/feast/errors.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ def __str__(self) -> str:
3232

3333
def __repr__(self) -> str:
3434
if hasattr(self, "__overridden_message__"):
35-
return f"{type(self).__name__}('{getattr(self,'__overridden_message__')}')"
35+
return f"{type(self).__name__}('{getattr(self, '__overridden_message__')}')"
3636
return super().__repr__()
3737

3838
def to_error_detail(self) -> str:

sdk/python/feast/feature_store.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -865,8 +865,7 @@ def apply(
865865
views_to_update = [
866866
ob
867867
for ob in objects
868-
if
869-
(
868+
if (
870869
# BFVs are not handled separately from FVs right now.
871870
(isinstance(ob, FeatureView) or isinstance(ob, BatchFeatureView))
872871
and not isinstance(ob, StreamFeatureView)
@@ -2031,9 +2030,9 @@ def write_logged_features(
20312030
if not isinstance(source, FeatureService):
20322031
raise ValueError("Only feature service is currently supported as a source")
20332032

2034-
assert (
2035-
source.logging_config is not None
2036-
), "Feature service must be configured with logging config in order to use this functionality"
2033+
assert source.logging_config is not None, (
2034+
"Feature service must be configured with logging config in order to use this functionality"
2035+
)
20372036

20382037
assert isinstance(logs, (pa.Table, Path))
20392038

sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_engine.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -278,7 +278,7 @@ def _print_pod_logs(self, job_id, feature_view, offset=0):
278278
label_selector=f"job-name={job_id}",
279279
).items
280280
for i, pod in enumerate(pods_list):
281-
logger.info(f"Logging output for {feature_view.name} pod {offset+i}")
281+
logger.info(f"Logging output for {feature_view.name} pod {offset + i}")
282282
try:
283283
logger.info(
284284
self.v1.read_namespaced_pod_log(pod.metadata.name, self.namespace)

sdk/python/feast/infra/materialization/snowflake_engine.py

Lines changed: 13 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -206,9 +206,9 @@ def __init__(
206206
online_store: OnlineStore,
207207
**kwargs,
208208
):
209-
assert (
210-
repo_config.offline_store.type == "snowflake.offline"
211-
), "To use SnowflakeMaterializationEngine, you must use Snowflake as an offline store."
209+
assert repo_config.offline_store.type == "snowflake.offline", (
210+
"To use SnowflakeMaterializationEngine, you must use Snowflake as an offline store."
211+
)
212212

213213
super().__init__(
214214
repo_config=repo_config,
@@ -241,10 +241,11 @@ def _materialize_one(
241241
project: str,
242242
tqdm_builder: Callable[[int], tqdm],
243243
):
244-
assert (
245-
isinstance(feature_view, BatchFeatureView)
246-
or isinstance(feature_view, FeatureView)
247-
), "Snowflake can only materialize FeatureView & BatchFeatureView feature view types."
244+
assert isinstance(feature_view, BatchFeatureView) or isinstance(
245+
feature_view, FeatureView
246+
), (
247+
"Snowflake can only materialize FeatureView & BatchFeatureView feature view types."
248+
)
248249

249250
entities = []
250251
for entity_name in feature_view.entities:
@@ -420,7 +421,7 @@ def generate_snowflake_materialization_query(
420421
{serial_func.upper()}({entity_names}, {entity_data}, {entity_types}) AS "entity_key",
421422
{features_str},
422423
"{feature_view.batch_source.timestamp_field}"
423-
{fv_created_str if fv_created_str else ''}
424+
{fv_created_str if fv_created_str else ""}
424425
FROM (
425426
{fv_latest_mapped_values_sql}
426427
)
@@ -460,7 +461,7 @@ def materialize_to_snowflake_online_store(
460461
"feature_name",
461462
"feature_value" AS "value",
462463
"{feature_view.batch_source.timestamp_field}" AS "event_ts"
463-
{fv_created_str + ' AS "created_ts"' if fv_created_str else ''}
464+
{fv_created_str + ' AS "created_ts"' if fv_created_str else ""}
464465
FROM (
465466
{materialization_sql}
466467
)
@@ -472,16 +473,16 @@ def materialize_to_snowflake_online_store(
472473
online_table."feature_name" = latest_values."feature_name",
473474
online_table."value" = latest_values."value",
474475
online_table."event_ts" = latest_values."event_ts"
475-
{',online_table."created_ts" = latest_values."created_ts"' if fv_created_str else ''}
476+
{',online_table."created_ts" = latest_values."created_ts"' if fv_created_str else ""}
476477
WHEN NOT MATCHED THEN
477-
INSERT ("entity_feature_key", "entity_key", "feature_name", "value", "event_ts" {', "created_ts"' if fv_created_str else ''})
478+
INSERT ("entity_feature_key", "entity_key", "feature_name", "value", "event_ts" {', "created_ts"' if fv_created_str else ""})
478479
VALUES (
479480
latest_values."entity_feature_key",
480481
latest_values."entity_key",
481482
latest_values."feature_name",
482483
latest_values."value",
483484
latest_values."event_ts"
484-
{',latest_values."created_ts"' if fv_created_str else ''}
485+
{',latest_values."created_ts"' if fv_created_str else ""}
485486
)
486487
"""
487488

sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -110,8 +110,8 @@ def pull_latest_from_table_or_query(
110110
SELECT {field_string},
111111
ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row
112112
FROM {from_expression}
113-
WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date.strftime('%Y-%m-%d %H:%M:%S')}' AND TIMESTAMP '{end_date.strftime('%Y-%m-%d %H:%M:%S')}'
114-
{"AND "+date_partition_column+" >= '"+start_date.strftime('%Y-%m-%d')+"' AND "+date_partition_column+" <= '"+end_date.strftime('%Y-%m-%d')+"' " if date_partition_column != "" and date_partition_column is not None else ''}
113+
WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date.strftime("%Y-%m-%d %H:%M:%S")}' AND TIMESTAMP '{end_date.strftime("%Y-%m-%d %H:%M:%S")}'
114+
{"AND " + date_partition_column + " >= '" + start_date.strftime("%Y-%m-%d") + "' AND " + date_partition_column + " <= '" + end_date.strftime("%Y-%m-%d") + "' " if date_partition_column != "" and date_partition_column is not None else ""}
115115
)
116116
WHERE _feast_row = 1
117117
"""
@@ -151,7 +151,7 @@ def pull_all_from_table_or_query(
151151
SELECT {field_string}
152152
FROM {from_expression}
153153
WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date.astimezone(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' AND TIMESTAMP '{end_date.astimezone(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}'
154-
{"AND "+date_partition_column+" >= '"+start_date.strftime('%Y-%m-%d')+"' AND "+date_partition_column+" <= '"+end_date.strftime('%Y-%m-%d')+"' " if date_partition_column != "" and date_partition_column is not None else ''}
154+
{"AND " + date_partition_column + " >= '" + start_date.strftime("%Y-%m-%d") + "' AND " + date_partition_column + " <= '" + end_date.strftime("%Y-%m-%d") + "' " if date_partition_column != "" and date_partition_column is not None else ""}
155155
"""
156156

157157
return AthenaRetrievalJob(

sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ def query_generator() -> Iterator[str]:
156156
# Hack for query_context.entity_selections to support uppercase in columns
157157
for context in query_context_dict:
158158
context["entity_selections"] = [
159-
f""""{entity_selection.replace(' AS ', '" AS "')}\""""
159+
f""""{entity_selection.replace(" AS ", '" AS "')}\""""
160160
for entity_selection in context["entity_selections"]
161161
]
162162

@@ -370,7 +370,7 @@ def build_point_in_time_query(
370370
final_output_feature_names.extend(
371371
[
372372
(
373-
f'{fv["name"]}__{fv["field_mapping"].get(feature, feature)}'
373+
f"{fv['name']}__{fv['field_mapping'].get(feature, feature)}"
374374
if full_feature_names
375375
else fv["field_mapping"].get(feature, feature)
376376
)

sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ def pull_latest_from_table_or_query(
111111
SELECT {fields_as_string},
112112
ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS feast_row_
113113
FROM {from_expression} t1
114-
WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date_str}') AND TIMESTAMP('{end_date_str}'){" AND "+date_partition_column+" >= '"+start_date.strftime('%Y-%m-%d')+"' AND "+date_partition_column+" <= '"+end_date.strftime('%Y-%m-%d')+"' " if date_partition_column != "" and date_partition_column is not None else ''}
114+
WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date_str}') AND TIMESTAMP('{end_date_str}'){" AND " + date_partition_column + " >= '" + start_date.strftime("%Y-%m-%d") + "' AND " + date_partition_column + " <= '" + end_date.strftime("%Y-%m-%d") + "' " if date_partition_column != "" and date_partition_column is not None else ""}
115115
) t2
116116
WHERE feast_row_ = 1
117117
"""

sdk/python/feast/infra/offline_stores/dask.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -193,9 +193,7 @@ def evaluate_historical_retrieval():
193193
):
194194
# Make sure all event timestamp fields are tz-aware. We default tz-naive fields to UTC
195195
entity_df_with_features[entity_df_event_timestamp_col] = (
196-
entity_df_with_features[
197-
entity_df_event_timestamp_col
198-
].apply(
196+
entity_df_with_features[entity_df_event_timestamp_col].apply(
199197
lambda x: x
200198
if x.tzinfo is not None
201199
else x.replace(tzinfo=timezone.utc)

sdk/python/feast/infra/online_stores/milvus_online_store/milvus.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -361,7 +361,9 @@ def retrieve_online_documents(
361361
assert all(
362362
field in [f["name"] for f in collection["fields"]]
363363
for field in output_fields
364-
), f"field(s) [{[field for field in output_fields if field not in [f['name'] for f in collection['fields']]]}] not found in collection schema"
364+
), (
365+
f"field(s) [{[field for field in output_fields if field not in [f['name'] for f in collection['fields']]]}] not found in collection schema"
366+
)
365367
# Note we choose the first vector field as the field to search on. Not ideal but it's something.
366368
ann_search_field = None
367369
for field in collection["fields"]:

sdk/python/feast/infra/online_stores/qdrant_online_store/qdrant.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -69,9 +69,9 @@ def _get_client(self, config: RepoConfig) -> QdrantClient:
6969
if self._client:
7070
return self._client
7171
online_store_config = config.online_store
72-
assert isinstance(
73-
online_store_config, QdrantOnlineStoreConfig
74-
), "Invalid type for online store config"
72+
assert isinstance(online_store_config, QdrantOnlineStoreConfig), (
73+
"Invalid type for online store config"
74+
)
7575

7676
assert online_store_config.similarity and (
7777
online_store_config.similarity.lower() in DISTANCE_MAPPING

0 commit comments

Comments (0)