Skip to content

Commit 7600bdf

Browse files
authored (author name missing from page capture)
Making datetime timezone aware in CW module and adding security token in Redshift (#581)
1 parent ac5407b commit 7600bdf

File tree

2 files changed

+19
-3
lines changed

2 files changed

+19
-3
lines changed

awswrangler/cloudwatch.py

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,10 +15,20 @@
1515
_QUERY_WAIT_POLLING_DELAY: float = 0.2 # SECONDS
1616

1717

18+
def _validate_args(
19+
start_timestamp: int,
20+
end_timestamp: int,
21+
) -> None:
22+
if start_timestamp < 0:
23+
raise exceptions.InvalidArgument("`start_time` cannot be a negative value.")
24+
if start_timestamp >= end_timestamp:
25+
raise exceptions.InvalidArgumentCombination("`start_time` must be inferior to `end_time`.")
26+
27+
1828
def start_query(
1929
query: str,
2030
log_group_names: List[str],
21-
start_time: datetime.datetime = datetime.datetime(year=1970, month=1, day=1),
31+
start_time: datetime.datetime = datetime.datetime(year=1970, month=1, day=1, tzinfo=datetime.timezone.utc),
2232
end_time: datetime.datetime = datetime.datetime.now(),
2333
limit: Optional[int] = None,
2434
boto3_session: Optional[boto3.Session] = None,
@@ -61,6 +71,7 @@ def start_query(
6171
end_timestamp: int = int(1000 * end_time.timestamp())
6272
_logger.debug("start_timestamp: %s", start_timestamp)
6373
_logger.debug("end_timestamp: %s", end_timestamp)
74+
_validate_args(start_timestamp=start_timestamp, end_timestamp=end_timestamp)
6475
args: Dict[str, Any] = {
6576
"logGroupNames": log_group_names,
6677
"startTime": start_timestamp,
@@ -120,7 +131,7 @@ def wait_query(query_id: str, boto3_session: Optional[boto3.Session] = None) ->
120131
def run_query(
121132
query: str,
122133
log_group_names: List[str],
123-
start_time: datetime.datetime = datetime.datetime(year=1970, month=1, day=1),
134+
start_time: datetime.datetime = datetime.datetime(year=1970, month=1, day=1, tzinfo=datetime.timezone.utc),
124135
end_time: datetime.datetime = datetime.datetime.now(),
125136
limit: Optional[int] = None,
126137
boto3_session: Optional[boto3.Session] = None,
@@ -174,7 +185,7 @@ def run_query(
174185
def read_logs(
175186
query: str,
176187
log_group_names: List[str],
177-
start_time: datetime.datetime = datetime.datetime(year=1970, month=1, day=1),
188+
start_time: datetime.datetime = datetime.datetime(year=1970, month=1, day=1, tzinfo=datetime.timezone.utc),
178189
end_time: datetime.datetime = datetime.datetime.now(),
179190
limit: Optional[int] = None,
180191
boto3_session: Optional[boto3.Session] = None,

tests/test_redshift.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -831,6 +831,7 @@ def test_copy_unload_creds(path, redshift_table):
831831
mode="overwrite",
832832
aws_access_key_id=credentials.access_key,
833833
aws_secret_access_key=credentials.secret_key,
834+
aws_session_token=credentials.token,
834835
)
835836
df2 = wr.redshift.unload(
836837
sql=f"SELECT * FROM public.{redshift_table}",
@@ -839,6 +840,7 @@ def test_copy_unload_creds(path, redshift_table):
839840
keep_files=False,
840841
aws_access_key_id=credentials.access_key,
841842
aws_secret_access_key=credentials.secret_key,
843+
aws_session_token=credentials.token,
842844
)
843845
assert df2.shape == (3, 1)
844846
wr.redshift.copy(
@@ -850,6 +852,7 @@ def test_copy_unload_creds(path, redshift_table):
850852
mode="append",
851853
aws_access_key_id=credentials.access_key,
852854
aws_secret_access_key=credentials.secret_key,
855+
aws_session_token=credentials.token,
853856
)
854857
df2 = wr.redshift.unload(
855858
sql=f"SELECT * FROM public.{redshift_table}",
@@ -858,6 +861,7 @@ def test_copy_unload_creds(path, redshift_table):
858861
keep_files=False,
859862
aws_access_key_id=credentials.access_key,
860863
aws_secret_access_key=credentials.secret_key,
864+
aws_session_token=credentials.token,
861865
)
862866
assert df2.shape == (6, 1)
863867
dfs = wr.redshift.unload(
@@ -868,6 +872,7 @@ def test_copy_unload_creds(path, redshift_table):
868872
chunked=True,
869873
aws_access_key_id=credentials.access_key,
870874
aws_secret_access_key=credentials.secret_key,
875+
aws_session_token=credentials.token,
871876
)
872877
for chunk in dfs:
873878
assert len(chunk.columns) == 1

0 commit comments

Comments (0)