
Commit ee3db5f

[Storage] [Next-Pylint] Datalake Package (Azure#30711)
* Datalake done
* Remove dunder errors
* Add back in dunder
1 parent 066ec1e commit ee3db5f

15 files changed: +34 -59 lines
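
The changes below are a pylint-driven cleanup of the datalake package: str.format() and printf-style formatting become f-strings, list()/dict() constructors become literals, redundant u'' prefixes are dropped, and a leftover Python 2 compatibility alias is removed. As a rough sketch of the overall pattern (names here are illustrative, not from the SDK; the checks involved are presumably pylint's consider-using-f-string and use-list-literal):

    # before: flagged by the newer pylint checks
    url = "{}://{}".format(scheme, hostname)
    headers = list()

    # after: same behavior, written as an f-string and a literal
    url = f"{scheme}://{hostname}"
    headers = []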

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py

Lines changed: 4 additions & 7 deletions
@@ -378,14 +378,11 @@ def rename_directory(self, new_name, **kwargs):
         new_file_system, new_path, new_dir_sas = self._parse_rename_path(new_name)
 
         new_directory_client = DataLakeDirectoryClient(
-            "{}://{}".format(self.scheme, self.primary_hostname), new_file_system, directory_name=new_path,
-            credential=self._raw_credential or new_dir_sas,
-            _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline)
+            f"{self.scheme}://{self.primary_hostname}", new_file_system, directory_name=new_path,
+            credential=self._raw_credential or new_dir_sas, _hosts=self._hosts, _configuration=self._config,
+            _pipeline=self._pipeline)
         new_directory_client._rename_path(  # pylint: disable=protected-access
-            '/{}/{}{}'.format(quote(unquote(self.file_system_name)),
-                              quote(unquote(self.path_name)),
-                              self._query_str),
-            **kwargs)
+            f'/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}', **kwargs)
         return new_directory_client
 
     def create_sub_directory(self, sub_directory,  # type: Union[DirectoryProperties, str]
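
Only the string construction changed here; the rename target is still built with quote(unquote(...)). A quick illustration of why that round trip is used, relying only on standard urllib.parse behavior (not code from this package): unquoting first makes the result the same whether the caller passed a raw or an already percent-encoded name.

    from urllib.parse import quote, unquote

    print(quote(unquote("my dir")))     # my%20dir
    print(quote(unquote("my%20dir")))   # my%20dir (no double-encoding)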

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py

Lines changed: 3 additions & 6 deletions
@@ -361,7 +361,7 @@ def _upload_options( # pylint:disable=too-many-statements
         elif hasattr(data, '__aiter__'):
             stream = AsyncIterStreamer(data, encoding=encoding)
         else:
-            raise TypeError("Unsupported data type: {}".format(type(data)))
+            raise TypeError(f"Unsupported data type: {type(data)}")
 
         validate_content = kwargs.pop('validate_content', False)
         content_settings = kwargs.pop('content_settings', None)
@@ -838,16 +838,13 @@ def rename_file(self, new_name, **kwargs):
         new_file_system, new_path, new_file_sas = self._parse_rename_path(new_name)
 
         new_file_client = DataLakeFileClient(
-            "{}://{}".format(self.scheme, self.primary_hostname), new_file_system, file_path=new_path,
+            f"{self.scheme}://{self.primary_hostname}", new_file_system, file_path=new_path,
             credential=self._raw_credential or new_file_sas,
             _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline,
             _location_mode=self._location_mode
         )
         new_file_client._rename_path(  # pylint: disable=protected-access
-            '/{}/{}{}'.format(quote(unquote(self.file_system_name)),
-                              quote(unquote(self.path_name)),
-                              self._query_str),
-            **kwargs)
+            f'/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}', **kwargs)
         return new_file_client
 
     def query_file(self, query_expression, **kwargs):

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py

Lines changed: 2 additions & 2 deletions
@@ -85,7 +85,7 @@ def __init__(
             raise ValueError("Account URL must be a string.")
         parsed_url = urlparse(account_url.rstrip('/'))
         if not parsed_url.netloc:
-            raise ValueError("Invalid URL: {}".format(account_url))
+            raise ValueError(f"Invalid URL: {account_url}")
 
         blob_account_url = convert_dfs_url_to_blob_url(account_url)
         self._blob_account_url = blob_account_url
@@ -121,7 +121,7 @@ def close(self):
     def _format_url(self, hostname):
         """Format the endpoint URL according to hostname
         """
-        formated_url = "{}://{}/{}".format(self.scheme, hostname, self._query_str)
+        formated_url = f"{self.scheme}://{hostname}/{self._query_str}"
         return formated_url
 
     @classmethod

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py

Lines changed: 4 additions & 8 deletions
@@ -86,7 +86,7 @@ def __init__(
         if not file_system_name:
             raise ValueError("Please specify a file system name.")
         if not parsed_url.netloc:
-            raise ValueError("Invalid URL: {}".format(account_url))
+            raise ValueError(f"Invalid URL: {account_url}")
 
         blob_account_url = convert_dfs_url_to_blob_url(account_url)
         # TODO: add self.account_url to base_client and remove _blob_account_url
@@ -122,11 +122,7 @@ def _format_url(self, hostname):
         file_system_name = self.file_system_name
         if isinstance(file_system_name, str):
             file_system_name = file_system_name.encode('UTF-8')
-        return "{}://{}/{}{}".format(
-            self.scheme,
-            hostname,
-            quote(file_system_name),
-            self._query_str)
+        return f"{self.scheme}://{hostname}/{quote(file_system_name)}{self._query_str}"
 
     def __exit__(self, *args):
         self._container_client.close()
@@ -323,7 +319,7 @@ def _rename_file_system(self, new_name, **kwargs):
         self._container_client._rename_container(new_name, **kwargs)  # pylint: disable=protected-access
         #TODO: self._raw_credential would not work with SAS tokens
         renamed_file_system = FileSystemClient(
-            "{}://{}".format(self.scheme, self.primary_hostname), file_system_name=new_name,
+            f"{self.scheme}://{self.primary_hostname}", file_system_name=new_name,
             credential=self._raw_credential, api_version=self.api_version, _configuration=self._config,
             _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts)
         return renamed_file_system
@@ -877,7 +873,7 @@ def _undelete_path_options(self, deleted_path_name, deletion_id):
         except IndexError:
             url = url_and_token[0] + '/' + quoted_path
 
-        undelete_source = quoted_path + '?deletionid={}'.format(deletion_id) if deletion_id else None
+        undelete_source = quoted_path + f'?deletionid={deletion_id}' if deletion_id else None
 
         return quoted_path, url, undelete_source
 

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py

Lines changed: 2 additions & 2 deletions
@@ -992,7 +992,7 @@ class AnalyticsLogging(GenLogging):
     """
 
     def __init__(self, **kwargs):
-        self.version = kwargs.get('version', u'1.0')
+        self.version = kwargs.get('version', '1.0')
         self.delete = kwargs.get('delete', False)
         self.read = kwargs.get('read', False)
         self.write = kwargs.get('write', False)
@@ -1027,7 +1027,7 @@ class Metrics(GenMetrics):
     """
 
     def __init__(self, **kwargs):
-        self.version = kwargs.get('version', u'1.0')
+        self.version = kwargs.get('version', '1.0')
         self.enabled = kwargs.get('enabled', False)
         self.include_apis = kwargs.get('include_apis')
         self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy()

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py

Lines changed: 3 additions & 7 deletions
@@ -78,7 +78,7 @@ def __init__(
         if not (file_system_name and path_name):
             raise ValueError("Please specify a file system name and file path.")
         if not parsed_url.netloc:
-            raise ValueError("Invalid URL: {}".format(account_url))
+            raise ValueError(f"Invalid URL: {account_url}")
 
         blob_account_url = convert_dfs_url_to_blob_url(account_url)
         self._blob_account_url = blob_account_url
@@ -131,12 +131,8 @@ def _format_url(self, hostname):
         file_system_name = self.file_system_name
         if isinstance(file_system_name, str):
             file_system_name = file_system_name.encode('UTF-8')
-        return "{}://{}/{}/{}{}".format(
-            self.scheme,
-            hostname,
-            quote(file_system_name),
-            quote(self.path_name, safe='~'),
-            self._query_str)
+        return (f"{self.scheme}://{hostname}/{quote(file_system_name)}/"
+                f"{quote(self.path_name, safe='~')}{self._query_str}")
 
     def _create_path_options(self, resource_type,
                              content_settings=None,  # type: Optional[ContentSettings]
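
The new return value in _format_url spans two adjacent f-string literals wrapped in parentheses; Python concatenates adjacent string literals at compile time, so this is equivalent to one long f-string. A minimal sketch with illustrative values (not taken from the SDK):

    scheme, hostname = "https", "myaccount.dfs.core.windows.net"
    url = (f"{scheme}://{hostname}/"
           f"filesystem/dir/file")
    print(url)  # https://myaccount.dfs.core.windows.net/filesystem/dir/file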

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py

Lines changed: 3 additions & 4 deletions
@@ -37,7 +37,7 @@ def get_api_version(kwargs):
     api_version = kwargs.get('api_version', None)
     if api_version and api_version not in _SUPPORTED_API_VERSIONS:
         versions = '\n'.join(_SUPPORTED_API_VERSIONS)
-        raise ValueError("Unsupported API version '{}'. Please select from:\n{}".format(api_version, versions))
+        raise ValueError(f"Unsupported API version '{api_version}'. Please select from:\n{versions}")
     return api_version or _SUPPORTED_API_VERSIONS[-1]
 
 
@@ -49,15 +49,14 @@ def convert_datetime_to_rfc1123(date):
     weekday = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][date.weekday()]
     month = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep",
              "Oct", "Nov", "Dec"][date.month - 1]
-    return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (weekday, date.day, month,
-                                                    date.year, date.hour, date.minute, date.second)
+    return f"{weekday}, {date.day:02} {month} {date.year:04} {date.hour:02}:{date.minute:02}:{date.second:02} GMT"
 
 
 def add_metadata_headers(metadata=None):
     # type: (Optional[Dict[str, str]]) -> str
     if not metadata:
         return None
-    headers = list()
+    headers = []
     if metadata:
         for key, value in metadata.items():
             headers.append(key + '=')
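
convert_datetime_to_rfc1123 keeps its zero-padded fields: each printf-style %02d conversion maps onto an f-string format spec such as {date.day:02}. A quick check of the equivalence, using arbitrary values:

    day, hour = 7, 3
    print("%02d %02d" % (day, hour))  # 07 03
    print(f"{day:02} {hour:02}")      # 07 03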

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/authentication.py

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ def _storage_header_sort(input_headers: List[Tuple[str, str]]) -> List[Tuple[str
     custom_weights = "-!#$%&*.^_|~+\"\'(),/`~0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]abcdefghijklmnopqrstuvwxyz{}"
 
     # Build dict of tuples and list of keys
-    header_dict = dict()
+    header_dict = {}
     header_keys = []
     for k, v in input_headers:
         header_dict[k] = v

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/request_handlers.py

Lines changed: 3 additions & 5 deletions
@@ -42,9 +42,7 @@ def serialize_iso(attr):
         if utc.tm_year > 9999 or utc.tm_year < 1:
             raise OverflowError("Hit max or min date")
 
-        date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
-            utc.tm_year, utc.tm_mon, utc.tm_mday,
-            utc.tm_hour, utc.tm_min, utc.tm_sec)
+        date = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}"
         return date + 'Z'
     except (ValueError, OverflowError) as err:
         msg = "Unable to serialize datetime object."
@@ -178,7 +176,7 @@ def serialize_batch_body(requests, batch_id):
 
     delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8')
     newline_bytes = _HTTP_LINE_ENDING.encode('utf-8')
-    batch_body = list()
+    batch_body = []
 
     content_index = 0
     for request in requests:
@@ -237,7 +235,7 @@ def _make_body_from_sub_request(sub_request):
     """
 
     # put the sub-request's headers into a list for efficient str concatenation
-    sub_request_body = list()
+    sub_request_body = []
 
     # get headers for ease of manipulation; remove headers as they are used
     headers = sub_request.headers

sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/uploads.py

Lines changed: 1 addition & 3 deletions
@@ -268,7 +268,7 @@ def _upload_chunk(self, chunk_offset, chunk_data):
 
     def _upload_substream_block(self, index, block_stream):
         try:
-            block_id = f'BlockId{"%05d" % (index/self.chunk_size)}'
+            block_id = f'BlockId{(index/self.chunk_size):05}'
             self.service.stage_block(
                 block_id,
                 len(block_stream),
@@ -579,8 +579,6 @@ def seekable(self):
     def __next__(self):
         return next(self.iterator)
 
-    next = __next__  # Python 2 compatibility.
-
     def tell(self, *args, **kwargs):
         raise UnsupportedOperation("Data generator does not support tell.")
 
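
Dropping the next = __next__ alias is safe because the package no longer supports Python 2: on Python 3, the next() builtin and for loops call __next__, whereas Python 2 looked for a method literally named next. A minimal sketch of an iterator that needs nothing beyond __next__ (illustrative class, not from the SDK):

    class Countdown:
        def __init__(self, start):
            self.current = start

        def __iter__(self):
            return self

        def __next__(self):  # the only hook Python 3's iteration protocol needs
            if self.current <= 0:
                raise StopIteration
            self.current -= 1
            return self.current + 1

    print(list(Countdown(3)))  # [3, 2, 1]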
