
Commit 7db106c

Minor - Docs fixes
1 parent 6a85f19 commit 7db106c

File tree

6 files changed: +13 -24 lines

awswrangler/s3/_download.py

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ def download(
     boto3_session: Optional[boto3.Session] = None,
     s3_additional_kwargs: Optional[Dict[str, Any]] = None,
 ) -> None:
-    """Download file from from a received S3 path to local file.
+    """Download file from a received S3 path to local file.

     Note
     ----

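For context, a minimal usage sketch of the function whose docstring is fixed above, assuming awswrangler 2.x; the bucket and key names are hypothetical placeholders.

    import awswrangler as wr

    # Download a single S3 object to a local file path.
    wr.s3.download(path="s3://my-bucket/reports/report.pdf", local_file="./report.pdf")
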
awswrangler/s3/_read_excel.py

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ def read_excel(
     s3_additional_kwargs: Optional[Dict[str, Any]] = None,
     **pandas_kwargs: Any,
 ) -> pd.DataFrame:
-    """Read EXCEL file(s) from from a received S3 path.
+    """Read EXCEL file(s) from a received S3 path.

     Note
     ----

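A sketch of calling read_excel as documented above; the path is a hypothetical placeholder, and extra keyword arguments flow through the **pandas_kwargs shown in the hunk to pandas.read_excel.

    import awswrangler as wr

    # Read one Excel workbook from S3 into a DataFrame; sheet_name is
    # forwarded to pandas.read_excel via **pandas_kwargs.
    df = wr.s3.read_excel("s3://my-bucket/data/sales.xlsx", sheet_name=0)
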
awswrangler/s3/_read_parquet.py

Lines changed: 2 additions & 2 deletions
@@ -565,7 +565,7 @@ def read_parquet(
     s3_additional_kwargs: Optional[Dict[str, Any]] = None,
     pyarrow_additional_kwargs: Optional[Dict[str, Any]] = None,
 ) -> Union[pd.DataFrame, Iterator[pd.DataFrame]]:
-    """Read Apache Parquet file(s) from from a received S3 prefix or list of S3 objects paths.
+    """Read Apache Parquet file(s) from a received S3 prefix or list of S3 objects paths.

     The concept of Dataset goes beyond the simple idea of files and enable more
     complex features like partitioning and catalog integration (AWS Glue Catalog).
@@ -998,7 +998,7 @@ def read_parquet_metadata(
     s3_additional_kwargs: Optional[Dict[str, Any]] = None,
     pyarrow_additional_kwargs: Optional[Dict[str, Any]] = None,
 ) -> Tuple[Dict[str, str], Optional[Dict[str, str]]]:
-    """Read Apache Parquet file(s) metadata from from a received S3 prefix or list of S3 objects paths.
+    """Read Apache Parquet file(s) metadata from a received S3 prefix or list of S3 objects paths.

     The concept of Dataset goes beyond the simple idea of files and enable more
     complex features like partitioning and catalog integration (AWS Glue Catalog).

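A sketch of the two functions touched here, with a hypothetical prefix; dataset=True enables the partition-aware Dataset behavior the docstrings describe.

    import awswrangler as wr

    # Read every Parquet object under a prefix as one logical dataset.
    df = wr.s3.read_parquet(path="s3://my-bucket/parquet/", dataset=True)

    # Fetch only the schema: column types plus partition types, matching the
    # Tuple[Dict[str, str], Optional[Dict[str, str]]] return annotation above.
    columns_types, partitions_types = wr.s3.read_parquet_metadata(
        path="s3://my-bucket/parquet/", dataset=True
    )
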
awswrangler/s3/_read_text.py

Lines changed: 3 additions & 3 deletions
@@ -179,7 +179,7 @@ def read_csv(
     partition_filter: Optional[Callable[[Dict[str, str]], bool]] = None,
     **pandas_kwargs: Any,
 ) -> Union[pd.DataFrame, Iterator[pd.DataFrame]]:
-    """Read CSV file(s) from from a received S3 prefix or list of S3 objects paths.
+    """Read CSV file(s) from a received S3 prefix or list of S3 objects paths.

     This function accepts Unix shell-style wildcards in the path argument.
     * (matches everything), ? (matches any single character),
@@ -327,7 +327,7 @@ def read_fwf(
     partition_filter: Optional[Callable[[Dict[str, str]], bool]] = None,
     **pandas_kwargs: Any,
 ) -> Union[pd.DataFrame, Iterator[pd.DataFrame]]:
-    """Read fixed-width formatted file(s) from from a received S3 prefix or list of S3 objects paths.
+    """Read fixed-width formatted file(s) from a received S3 prefix or list of S3 objects paths.

     This function accepts Unix shell-style wildcards in the path argument.
     * (matches everything), ? (matches any single character),
@@ -476,7 +476,7 @@ def read_json(
     partition_filter: Optional[Callable[[Dict[str, str]], bool]] = None,
     **pandas_kwargs: Any,
 ) -> Union[pd.DataFrame, Iterator[pd.DataFrame]]:
-    """Read JSON file(s) from from a received S3 prefix or list of S3 objects paths.
+    """Read JSON file(s) from a received S3 prefix or list of S3 objects paths.

     This function accepts Unix shell-style wildcards in the path argument.
     * (matches everything), ? (matches any single character),

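The wildcard behavior these docstrings mention, sketched with hypothetical paths; glob.escape is the stdlib helper the docstrings themselves recommend for keys containing literal wildcard characters.

    import glob

    import awswrangler as wr

    # Unix shell-style wildcards in the path select multiple objects.
    df = wr.s3.read_csv(path="s3://my-bucket/logs/2021-*.csv")

    # Escape a key that contains literal wildcard characters (*, ?, [])
    # so they are matched verbatim rather than expanded as patterns.
    df2 = wr.s3.read_csv(path=glob.escape("s3://my-bucket/raw[2021]/data.csv"))
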
awswrangler/s3/_write_parquet.py

Lines changed: 5 additions & 12 deletions
@@ -748,11 +748,11 @@ def store_parquet_metadata( # pylint: disable=too-many-arguments
 ) -> Tuple[Dict[str, str], Optional[Dict[str, str]], Optional[Dict[str, List[str]]]]:
     """Infer and store parquet metadata on AWS Glue Catalog.

-    Infer Apache Parquet file(s) metadata from from a received S3 prefix or list of S3 objects paths
+    Infer Apache Parquet file(s) metadata from a received S3 prefix
     And then stores it on AWS Glue Catalog including all inferred partitions
-    (No need of 'MSCK REPAIR TABLE')
+    (No need for 'MSCK REPAIR TABLE')

-    The concept of Dataset goes beyond the simple idea of files and enable more
+    The concept of Dataset goes beyond the simple idea of files and enables more
     complex features like partitioning and catalog integration (AWS Glue Catalog).

     This function accepts Unix shell-style wildcards in the path argument.
@@ -761,22 +761,15 @@ def store_parquet_metadata( # pylint: disable=too-many-arguments
     If you want to use a path which includes Unix shell-style wildcard characters (`*, ?, []`),
     you can use `glob.escape(path)` before passing the path to this function.

-    Note
-    ----
-    On `append` mode, the `parameters` will be upsert on an existing table.
-
     Note
     ----
     In case of `use_threads=True` the number of threads
     that will be spawned will be gotten from os.cpu_count().

     Parameters
     ----------
-    path : Union[str, List[str]]
-        S3 prefix (accepts Unix shell-style wildcards)
-        (e.g. s3://bucket/prefix) or list of S3 objects paths (e.g. [s3://bucket/key0, s3://bucket/key1]).
-    database : str
-        Glue/Athena catalog: Database name.
+    path : str
+        S3 prefix (accepts Unix shell-style wildcards) (e.g. s3://bucket/prefix).
     table : str
         Glue/Athena catalog: Table name.
     database : str
awswrangler/s3/_write_text.py

Lines changed: 1 addition & 5 deletions
@@ -124,11 +124,7 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state

     Note
     ----
-    Compression: The minimum acceptable version to achive it is Pandas 1.2.0 that requires Python >= 3.7.1.
-
-    Note
-    ----
-    On `append` mode, the `parameters` will be upsert on an existing table.
+    Compression: The minimum acceptable version to achieve it is Pandas 1.2.0 that requires Python >= 3.7.1.

     Note
     ----

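A sketch of the compression note that survives this hunk; this assumes compression is forwarded to pandas.to_csv through **pandas_kwargs, which is why Pandas >= 1.2.0 is required. The bucket path is a placeholder.

    import pandas as pd

    import awswrangler as wr

    df = pd.DataFrame({"id": [1, 2], "value": ["foo", "bar"]})

    # Write a gzip-compressed CSV to S3; per the note above, this needs
    # Pandas 1.2.0+ (and therefore Python >= 3.7.1).
    wr.s3.to_csv(df=df, path="s3://my-bucket/data/file.csv.gz", compression="gzip")
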