
Commit 9181673

Fix Sphinx on azure-storage-file-datalake (#35150)
Authored by Yalin Li

1 parent 11d9e69 commit 9181673

14 files changed: +90 -96 lines changed

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py

Lines changed: 7 additions & 3 deletions
@@ -104,14 +104,18 @@ def from_connection_string(
  - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
  If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
  should be the storage account key.
- :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long
+ :type credential:
+     ~azure.core.credentials.AzureNamedKeyCredential or
+     ~azure.core.credentials.AzureSasCredential or
+     ~azure.core.credentials.TokenCredential or
+     str or dict[str, str] or None
  :param directory_name:
      The name of directory to interact with. The directory is under file system.
  :type directory_name: str
  :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
      authentication. Only has an effect when credential is of type TokenCredential. The value could be
      https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
- :return: a DataLakeDirectoryClient
+ :return: A DataLakeDirectoryClient.
  :rtype: ~azure.storage.filedatalake.DataLakeDirectoryClient
  """
  account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs')

@@ -194,7 +198,7 @@ def create_directory(self, metadata=None, # type: Optional[Dict[str, str]]
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
  #other-client--per-operation-configuration>`_.
  :return: A dictionary of response headers.
- :rtype: Dict[str, Union[str, datetime]]
+ :rtype: dict[str, Union[str, datetime]]

  .. admonition:: Example:

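The credential hunk above swaps a single overlong :paramtype line, which carried Python default-value syntax and a pylint pragma into the rendered docs, for a :type credential: field whose value is an "or"-separated list of cross-referenced classes. A minimal sketch of that field-list style, written against a hypothetical function rather than the SDK method itself:

    def from_connection_string(conn_str, credential=None):
        """Create a client from a connection string.

        :param str conn_str: A connection string to an Azure Storage account.
        :param credential: The credentials with which to authenticate. Optional.
        :type credential:
            ~azure.core.credentials.AzureNamedKeyCredential or
            ~azure.core.credentials.AzureSasCredential or
            ~azure.core.credentials.TokenCredential or
            str or dict[str, str] or None
        :return: A client configured from the connection string.
        :rtype: object
        """

Each ~azure.core.credentials.* target is rendered by Sphinx as a link showing only the class name, so the multi-line field reads cleanly where the old one-liner leaked "= None, # pylint: disable=line-too-long" into the docs.
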
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py

Lines changed: 7 additions & 3 deletions
@@ -114,12 +114,16 @@ def from_connection_string(
  Credentials provided here will take precedence over those in the connection string.
  If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
  should be the storage account key.
- :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long
+ :type credential:
+     ~azure.core.credentials.AzureNamedKeyCredential or
+     ~azure.core.credentials.AzureSasCredential or
+     ~azure.core.credentials.TokenCredential or
+     str or dict[str, str] or None
  :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
      authentication. Only has an effect when credential is of type TokenCredential. The value could be
      https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
- :return a DataLakeFileClient
- :rtype ~azure.storage.filedatalake.DataLakeFileClient
+ :returns: A DataLakeFileClient.
+ :rtype: ~azure.storage.filedatalake.DataLakeFileClient
  """
  account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs')
  return cls(

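Several of the fixes in this file are simply missing colons: ":return" and ":rtype" without a trailing colon are ordinary text to docutils, so Sphinx prints the raw line instead of formatting a Returns section. A small before/after sketch on a hypothetical function (not the SDK source):

    def broken():
        """Do something.

        :return a value
        :rtype str
        """
        # Without the trailing colon, docutils does not recognize these as fields,
        # so the raw ":return a value" text shows up verbatim in the rendered page.

    def fixed():
        """Do something.

        :returns: A value.
        :rtype: str
        """
        # Proper ":returns:" / ":rtype:" fields become a formatted Returns section.
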
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py

Lines changed: 8 additions & 4 deletions
@@ -150,12 +150,16 @@ def from_connection_string(
  an instance of a AzureSasCredential from azure.core.credentials, an account shared access
  key, or an instance of a TokenCredentials class from azure.identity.
  Credentials provided here will take precedence over those in the connection string.
- :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] # pylint: disable=line-too-long
+ :type credential:
+     ~azure.core.credentials.AzureNamedKeyCredential or
+     ~azure.core.credentials.AzureSasCredential or
+     ~azure.core.credentials.TokenCredential or
+     str or dict[str, str] or None
  :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
      authentication. Only has an effect when credential is of type TokenCredential. The value could be
      https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
- :return a DataLakeServiceClient
- :rtype ~azure.storage.filedatalake.DataLakeServiceClient
+ :returns: A DataLakeServiceClient.
+ :rtype: ~azure.storage.filedatalake.DataLakeServiceClient

  .. admonition:: Example:

@@ -613,7 +617,7 @@ def get_service_properties(self, **kwargs):
  #other-client--per-operation-configuration>`_.
  :returns: An object containing datalake service properties such as
      analytics logging, hour/minute metrics, cors rules, etc.
- :rtype: Dict[str, Any]
+ :rtype: dict[str, Any]
  """
  props = self._blob_service_client.get_service_properties(**kwargs) # pylint: disable=protected-access
  return get_datalake_service_properties(props)

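For context, the credential union documented above matches what from_connection_string accepts at runtime. A hedged usage sketch, where the connection string and account values are placeholders and DefaultAzureCredential assumes azure-identity is installed:

    from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
    from azure.identity import DefaultAzureCredential
    from azure.storage.filedatalake import DataLakeServiceClient

    conn_str = "DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>;EndpointSuffix=core.windows.net"

    # The credential can be omitted (taken from the connection string) or passed
    # explicitly as any of the documented types.
    client = DataLakeServiceClient.from_connection_string(conn_str)
    client = DataLakeServiceClient.from_connection_string(conn_str, credential="<account-key-or-sas>")
    client = DataLakeServiceClient.from_connection_string(conn_str, credential=AzureSasCredential("<sas-token>"))
    client = DataLakeServiceClient.from_connection_string(conn_str, credential=AzureNamedKeyCredential("<account>", "<key>"))
    client = DataLakeServiceClient.from_connection_string(conn_str, credential=DefaultAzureCredential())

    props = client.get_service_properties()  # dict[str, Any], per the corrected :rtype: above
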
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py

Lines changed: 10 additions & 6 deletions
@@ -164,12 +164,16 @@ def from_connection_string(
  Credentials provided here will take precedence over those in the connection string.
  If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
  should be the storage account key.
- :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long
+ :type credential:
+     ~azure.core.credentials.AzureNamedKeyCredential or
+     ~azure.core.credentials.AzureSasCredential or
+     ~azure.core.credentials.TokenCredential or
+     str or dict[str, str] or None
  :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
      authentication. Only has an effect when credential is of type TokenCredential. The value could be
      https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
- :return a FileSystemClient
- :rtype ~azure.storage.filedatalake.FileSystemClient
+ :returns: A FileSystemClient.
+ :rtype: ~azure.storage.filedatalake.FileSystemClient

  .. admonition:: Example:

@@ -275,7 +279,7 @@ def create_file_system(self, metadata=None, # type: Optional[Dict[str, str]]
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
  #other-client--per-operation-configuration>`_.
  :returns: A dictionary of response headers.
- :rtype: Dict[str, Union[str, datetime]]
+ :rtype: dict[str, Union[str, datetime]]

  .. admonition:: Example:

@@ -457,7 +461,7 @@ def set_file_system_metadata( # type: ignore
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
  #other-client--per-operation-configuration>`_.
  :returns: filesystem-updated property dict (Etag and last modified).
- :rtype: Dict[str, str]
+ :rtype: dict[str, str] or dict[str, ~datetime.datetime]

  .. admonition:: Example:

@@ -510,7 +514,7 @@ def set_file_system_access_policy(
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
  #other-client--per-operation-configuration>`_.
  :returns: File System-updated property dict (Etag and last modified).
- :rtype: dict[str, str or ~datetime.datetime]
+ :rtype: dict[str, str] or dict[str, ~datetime.datetime]
  """
  return self._container_client.set_container_access_policy(signed_identifiers,
      public_access=public_access, **kwargs)

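The set_file_system_* return types corrected above are the property dicts surfaced from the underlying blob container calls. A hedged sketch of how that value is typically consumed; the file system name and metadata are hypothetical, and the key names follow the "(Etag and last modified)" wording in the docstrings rather than a verified contract:

    from azure.storage.filedatalake import DataLakeServiceClient

    conn_str = "<connection-string>"
    service = DataLakeServiceClient.from_connection_string(conn_str)
    file_system = service.get_file_system_client("my-file-system")

    result = file_system.set_file_system_metadata({"category": "test"})
    # Expected shape per the docstring: an ETag plus a last-modified timestamp.
    print(result.get("etag"), result.get("last_modified"))
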
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py

Lines changed: 2 additions & 2 deletions
@@ -770,7 +770,7 @@ class LocationMode(object):
  class DelimitedJsonDialect(BlobDelimitedJSON):
      """Defines the input or output JSON serialization for a datalake query.

-     :keyword str delimiter: The line separator character, default value is '\n'
+     :keyword str delimiter: The line separator character, default value is '\\\\n'.
      """

@@ -782,7 +782,7 @@ class DelimitedTextDialect(BlobDelimitedTextDialect):
  :keyword str quotechar:
      Field quote, defaults to '"'.
  :keyword str lineterminator:
-     Record separator, defaults to '\n'.
+     Record separator, defaults to '\\\\n'.
  :keyword str escapechar:
      Escape char, defaults to empty.
  :keyword bool has_header:

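The _models.py change is about escaping levels rather than field syntax: in a regular (non-raw) docstring, '\n' is already a real newline by the time Sphinx sees it, and a single escaped backslash is then consumed again by reST. A minimal illustration of what each spelling leaves in __doc__:

    def plain():
        """default is '\n'"""        # Python escape: __doc__ contains a real newline

    def escaped_once():
        """default is '\\n'"""       # __doc__ contains backslash + n; reST then consumes the backslash

    def escaped_twice():
        """default is '\\\\n'"""     # __doc__ contains two backslashes + n; reST renders a literal \n

    for func in (plain, escaped_once, escaped_twice):
        print(repr(func.__doc__))
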
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py

Lines changed: 5 additions & 4 deletions
@@ -282,7 +282,7 @@ def _create(self, resource_type, content_settings=None, metadata=None, **kwargs)
  :return: A dictionary of response headers.
  :keyword str encryption_context:
      Specifies the encryption context to set on the file.
- :rtype: Dict[str, Union[str, datetime]]
+ :rtype: dict[str, str] or dict[str, ~datetime.datetime]
  """
  lease_id = kwargs.get('lease_id', None)
  lease_duration = kwargs.get('lease_duration', None)

@@ -347,7 +347,7 @@ def _delete(self, **kwargs):
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
  #other-client--per-operation-configuration>`_.
  :returns: A dictionary containing information about the deleted path.
- :rtype: Dict[str, Any]
+ :rtype: dict[str, Any]
  """
  # Perform paginated delete only if using OAuth, deleting a directory, and api version is 2023-08-03 or later
  # The pagination is only for ACL checks, the final request remains the atomic delete operation

@@ -980,7 +980,7 @@ def set_metadata(self, metadata, # type: Dict[str, str]
  :param metadata:
      A dict containing name-value pairs to associate with the file system as
      metadata. Example: {'category':'test'}
- :type metadata: Dict[str, str]
+ :type metadata: dict[str, str]
  :keyword lease:
      If specified, set_file_system_metadata only succeeds if the
      file system's lease is active and matches this ID.

@@ -1012,6 +1012,7 @@ def set_metadata(self, metadata, # type: Dict[str, str]
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
  #other-client--per-operation-configuration>`_.
  :returns: file system-updated property dict (Etag and last modified).
+ :rtype: dict[str, str] or dict[str, ~datetime.datetime]
  """
  return self._blob_client.set_blob_metadata(metadata=metadata, **kwargs)

@@ -1052,7 +1053,7 @@ def set_http_headers(self, content_settings: Optional["ContentSettings"] = None,
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
  #other-client--per-operation-configuration>`_.
  :returns: file/directory-updated property dict (Etag and last modified)
- :rtype: Dict[str, Any]
+ :rtype: dict[str, Any]
  """
  return self._blob_client.set_http_headers(content_settings=content_settings, **kwargs)

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py

Lines changed: 13 additions & 10 deletions
@@ -42,7 +42,7 @@

  if TYPE_CHECKING:
      from azure.core.credentials import TokenCredential
-     from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import
+     from azure.core.pipeline import ( # pylint: disable=non-abstract-transport-import
          PipelineRequest,
          PipelineResponse
      )

@@ -411,8 +411,9 @@ def _set_next_host_location(self, settings: Dict[str, Any], request: "PipelineRe
  """
  A function which sets the next host location on the request, if applicable.

- :param Dict[str, Any]] settings: The configurable values pertaining to the next host location.
- :param PipelineRequest request: A pipeline request object.
+ :param dict[str, Any]] settings: The configurable values pertaining to the next host location.
+ :param request: A pipeline request object.
+ :type request: ~azure.core.pipeline.PipelineRequest
  """
  if settings['hosts'] and all(settings['hosts'].values()):
      url = urlparse(request.url)

@@ -451,7 +452,7 @@ def get_backoff_time(self, settings: Dict[str, Any]) -> float: # pylint: disabl
  """ Formula for computing the current backoff.
  Should be calculated by child class.

- :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+ :param dict[str, Any]] settings: The configurable values pertaining to the backoff time.
  :returns: The backoff time.
  :rtype: float
  """

@@ -471,12 +472,14 @@ def increment(
  ) -> bool:
  """Increment the retry counters.

- Dict[str, Any]] settings: The configurable values pertaining to the increment operation.
- :param PipelineRequest request: A pipeline request object.
- :param Optional[PipelineResponse] response: A pipeline response object.
+ :param dict[str, Any]] settings: The configurable values pertaining to the increment operation.
+ :param request: A pipeline request object.
+ :type request: ~azure.core.pipeline.PipelineRequest
+ :param response: A pipeline response object.
+ :type response: ~azure.core.pipeline.PipelineResponse or None
  :param error: An error encountered during the request, or
      None if the response was received successfully.
- :paramtype error: Optional[AzureError]
+ :type error: ~azure.core.exceptions.AzureError or None
  :returns: Whether the retry attempts are exhausted.
  :rtype: bool
  """

@@ -608,7 +611,7 @@ def get_backoff_time(self, settings: Dict[str, Any]) -> float:
  """
  Calculates how long to sleep before retrying.

- :param Dict[str, Any]] settings: The configurable values pertaining to get backoff time.
+ :param dict[str, Any]] settings: The configurable values pertaining to get backoff time.
  :returns:
      A float indicating how long to wait before retrying the request,
      or None to indicate no retry should be performed.

@@ -660,7 +663,7 @@ def get_backoff_time(self, settings: Dict[str, Any]) -> float:
  """
  Calculates how long to sleep before retrying.

- :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+ :param dict[str, Any]] settings: The configurable values pertaining to the backoff time.
  :returns:
      A float indicating how long to wait before retrying the request,
      or None to indicate no retry should be performed.

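The import hunks in both policy modules point the type-only names at their actual home: PipelineRequest and PipelineResponse are defined in azure.core.pipeline, while azure.core.pipeline.transport holds the transports. A minimal sketch of the pattern; the function here is a standalone illustration, whereas in the SDK these are methods on the retry policies:

    from typing import TYPE_CHECKING, Any, Dict

    if TYPE_CHECKING:
        # Imported only for type checkers; no runtime dependency on the pipeline classes.
        from azure.core.pipeline import PipelineRequest, PipelineResponse

    def describe(settings: Dict[str, Any], request: "PipelineRequest") -> str:
        """Summarize a pipeline request against the retry settings.

        :param dict[str, Any] settings: The configurable values pertaining to the request.
        :param request: A pipeline request object.
        :type request: ~azure.core.pipeline.PipelineRequest
        :returns: A short description of the request.
        :rtype: str
        """
        return f"{request.http_request.method} with {len(settings)} settings"
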
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py

Lines changed: 3 additions & 3 deletions
@@ -19,7 +19,7 @@

  if TYPE_CHECKING:
      from azure.core.credentials_async import AsyncTokenCredential
-     from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import
+     from azure.core.pipeline import ( # pylint: disable=non-abstract-transport-import
          PipelineRequest,
          PipelineResponse
      )

@@ -195,7 +195,7 @@ def get_backoff_time(self, settings: Dict[str, Any]) -> float:
  """
  Calculates how long to sleep before retrying.

- :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+ :param dict[str, Any]] settings: The configurable values pertaining to the backoff time.
  :return:
      An integer indicating how long to wait before retrying the request,
      or None to indicate no retry should be performed.

@@ -247,7 +247,7 @@ def get_backoff_time(self, settings: Dict[str, Any]) -> float:
  """
  Calculates how long to sleep before retrying.

- :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+ :param dict[str, Any]] settings: The configurable values pertaining to the backoff time.
  :return:
      An integer indicating how long to wait before retrying the request,
      or None to indicate no retry should be performed.

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py

Lines changed: 1 addition & 19 deletions
@@ -158,7 +158,7 @@ async def create_directory(self, metadata=None, # type: Optional[Dict[str, str]
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
  #other-client--per-operation-configuration>`_.
  :return: A dictionary of response headers.
- :rtype: Dict[str, Union[str, datetime]]
+ :rtype: dict[str, str] or dict[str, ~datetime.datetime]

  .. admonition:: Example:

@@ -612,15 +612,6 @@ def get_file_client(self, file # type: Union[FileProperties, str]
  :type file: str or ~azure.storage.filedatalake.FileProperties
  :returns: A DataLakeFileClient.
  :rtype: ~azure.storage.filedatalake.aio.DataLakeFileClient
-
- .. admonition:: Example:
-
-     .. literalinclude:: ../samples/test_datalake_service_samples.py
-         :start-after: [START bsc_get_file_client]
-         :end-before: [END bsc_get_file_client]
-         :language: python
-         :dedent: 12
-         :caption: Getting the file client to interact with a specific file.
  """
  try:
      file_path = file.get('name')

@@ -649,15 +640,6 @@ def get_sub_directory_client(self, sub_directory # type: Union[DirectoryPropert
  :type sub_directory: str or ~azure.storage.filedatalake.DirectoryProperties
  :returns: A DataLakeDirectoryClient.
  :rtype: ~azure.storage.filedatalake.aio.DataLakeDirectoryClient
-
- .. admonition:: Example:
-
-     .. literalinclude:: ../samples/test_datalake_service_samples.py
-         :start-after: [START bsc_get_directory_client]
-         :end-before: [END bsc_get_directory_client]
-         :language: python
-         :dedent: 12
-         :caption: Getting the directory client to interact with a specific directory.
  """
  try:
      subdir_path = sub_directory.get('name')

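The two hunks above drop literalinclude sample blocks from the async accessor docstrings. For readers who relied on those examples, a hedged usage sketch of the same two accessors; the connection string, file system, and path names are placeholders:

    import asyncio
    from azure.storage.filedatalake.aio import DataLakeDirectoryClient

    async def main():
        directory = DataLakeDirectoryClient.from_connection_string(
            "<connection-string>", "my-file-system", "my-directory")
        async with directory:
            file_client = directory.get_file_client("report.csv")          # client for a file in this directory
            subdir_client = directory.get_sub_directory_client("archive")  # client for a subdirectory
            print(file_client.url, subdir_client.url)

    asyncio.run(main())
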
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py

Lines changed: 2 additions & 2 deletions
@@ -92,7 +92,7 @@ async def create_file(self, content_settings=None, # type: Optional[ContentSett
  ContentSettings object used to set path properties.
  :param metadata:
      Name-value pairs associated with the file as metadata.
- :type metadata: Optional[Dict[str, str]]
+ :type metadata: Optional[dict[str, str]]
  :keyword lease:
      Required if the file has an active lease. Value can be a DataLakeLeaseClient object
      or the lease ID as a string.

@@ -345,7 +345,7 @@ async def upload_data(
  ContentSettings object used to set path properties.
  :keyword metadata:
      Name-value pairs associated with the blob as metadata.
- :paramtype metadata: Optional[Dict[str, str]]
+ :paramtype metadata: Optional[dict[str, str]]
  :keyword ~azure.storage.filedatalake.DataLakeLeaseClient or str lease:
      Required if the blob has an active lease. Value can be a DataLakeLeaseClient object
      or the lease ID as a string.
