Commit 528c4c5

review feedback
1 parent ec3e87f commit 528c4c5

29 files changed: +88 additions, -88 deletions

elasticsearch/__init__.py

Lines changed: 2 additions & 2 deletions
@@ -27,15 +27,15 @@
 from ._version import __versionstr__

 # Ensure that a compatible version of elastic-transport is installed.
-_version_groups = tuple(int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", _elastic_transport_version).groups())  # type: ignore
+_version_groups = tuple(int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", _elastic_transport_version).groups())  # type: ignore[union-attr]
 if _version_groups < (8, 0, 0) or _version_groups > (9, 0, 0):
     raise ImportError(
         "An incompatible version of elastic-transport is installed. Must be between "
         "v8.0.0 and v9.0.0. Install the correct version with the following command: "
         "$ python -m pip install 'elastic-transport>=8, <9'"
     )

-_version_groups = re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups()  # type: ignore
+_version_groups = re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups()  # type: ignore[assignment, union-attr]
 _major, _minor, _patch = (int(x) for x in _version_groups)
 VERSION = __version__ = (_major, _minor, _patch)

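Note: the `[union-attr]` code above is the specific error mypy raises here, because `re.search()` is typed as returning `Optional[Match[str]]` and the code calls `.groups()` on it unconditionally. Narrowing a bare `# type: ignore` to explicit codes keeps unrelated errors on the same line visible. A minimal standalone sketch of the same situation (illustrative, not part of this commit):

```python
import re
from typing import Tuple

def parse_version(version: str) -> Tuple[int, int, int]:
    # re.search() returns Optional[Match[str]], so calling .groups() on it
    # directly is what mypy reports as [union-attr]. Checking for None first
    # avoids needing any "# type: ignore" comment at all.
    match = re.search(r"^(\d+)\.(\d+)\.(\d+)", version)
    if match is None:
        raise ValueError(f"not a semantic version: {version!r}")
    major, minor, patch = (int(x) for x in match.groups())
    return major, minor, patch

print(parse_version("8.17.1"))  # (8, 17, 1)
```
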
elasticsearch/_async/helpers.py

Lines changed: 1 addition & 1 deletion
@@ -257,7 +257,7 @@ async def map_actions() -> AsyncIterable[_TYPE_BULK_ACTION_HEADER_AND_BODY]:
                 ]
                 ok: bool
                 info: Dict[str, Any]
-                async for data, (ok, info) in azip(  # type: ignore
+                async for data, (ok, info) in azip(  # type: ignore[assignment, misc]
                     bulk_data,
                     _process_bulk_chunk(
                         client,

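For context, `azip` pairs each queued bulk action with the outcome reported for it. A hedged sketch of what an async zip helper of this kind does, under the assumption that one side is a plain iterable and the other an async iterator (the names here are made up; this is not the library's implementation):

```python
import asyncio
from typing import AsyncIterator, Iterable, Tuple, TypeVar

T = TypeVar("T")
U = TypeVar("U")

async def azip_sketch(
    items: Iterable[T], results: AsyncIterator[U]
) -> AsyncIterator[Tuple[T, U]]:
    # Walk a plain iterable and an async iterator in lockstep, stopping as
    # soon as either side runs out.
    it = iter(items)
    async for result in results:
        try:
            yield next(it), result
        except StopIteration:
            return

async def demo() -> None:
    async def outcomes() -> AsyncIterator[bool]:
        for ok in (True, False, True):
            yield ok

    async for action, ok in azip_sketch(["a", "b", "c"], outcomes()):
        print(action, ok)

asyncio.run(demo())
```
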
elasticsearch/_sync/client/utils.py

Lines changed: 1 addition & 1 deletion
@@ -232,7 +232,7 @@ def host_mapping_to_node_config(host: Mapping[str, Union[str, int]]) -> NodeConf
         )
         options["path_prefix"] = options.pop("url_prefix")

-    return NodeConfig(**options)  # type: ignore
+    return NodeConfig(**options)  # type: ignore[arg-type]


 def cloud_id_to_node_configs(cloud_id: str) -> List[NodeConfig]:

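The suppressed `[arg-type]` error comes from unpacking a loosely typed options mapping into the typed fields of `elastic_transport.NodeConfig`. A rough illustration of that mismatch, assuming only the common `scheme`/`host`/`port`/`path_prefix` fields:

```python
from typing import Dict, Union

from elastic_transport import NodeConfig

# Values in this dict are typed Union[str, int], which is why unpacking it
# into NodeConfig's more precisely typed parameters trips mypy's arg-type
# check; the change above silences exactly that error and nothing else.
options: Dict[str, Union[str, int]] = {
    "scheme": "https",
    "host": "localhost",
    "port": 9200,
    "path_prefix": "/es",
}
node = NodeConfig(**options)  # type: ignore[arg-type]
print(node.host, node.port)
```
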
elasticsearch/dsl/__init__.py

Lines changed: 0 additions & 3 deletions
@@ -104,9 +104,6 @@
 from .utils import AttrDict, AttrList, DslBase
 from .wrappers import Range

-VERSION = (8, 17, 1)
-__version__ = VERSION
-__versionstr__ = ".".join(map(str, VERSION))
 __all__ = [
     "A",
     "Agg",

elasticsearch/dsl/_async/document.py

Lines changed: 2 additions & 2 deletions
@@ -241,11 +241,11 @@ async def mget(
             error_ids = [doc["_id"] for doc in error_docs]
             message = "Required routing not provided for documents %s."
             message %= ", ".join(error_ids)
-            raise RequestError(400, message, error_docs)  # type: ignore
+            raise RequestError(400, message, error_docs)  # type: ignore[arg-type]
         if missing_docs:
             missing_ids = [doc["_id"] for doc in missing_docs]
             message = f"Documents {', '.join(missing_ids)} not found."
-            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore
+            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore[arg-type]
         return objs

     async def delete(

elasticsearch/dsl/_sync/document.py

Lines changed: 2 additions & 2 deletions
@@ -235,11 +235,11 @@ def mget(
             error_ids = [doc["_id"] for doc in error_docs]
             message = "Required routing not provided for documents %s."
             message %= ", ".join(error_ids)
-            raise RequestError(400, message, error_docs)  # type: ignore
+            raise RequestError(400, message, error_docs)  # type: ignore[arg-type]
         if missing_docs:
             missing_ids = [doc["_id"] for doc in missing_docs]
             message = f"Documents {', '.join(missing_ids)} not found."
-            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore
+            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore[arg-type]
         return objs

     def delete(

elasticsearch/dsl/aggs.py

Lines changed: 1 addition & 1 deletion
@@ -193,7 +193,7 @@ def pipeline(
         return cast("Pipeline[_R]", self._agg(False, name, agg_type, *args, **params))

     def result(self, search: "SearchBase[_R]", data: Any) -> AttrDict[Any]:
-        return BucketData(self, search, data)  # type: ignore
+        return BucketData(self, search, data)  # type: ignore[arg-type]


 class Bucket(AggBase[_R], Agg[_R]):

elasticsearch/dsl/analysis.py

Lines changed: 6 additions & 6 deletions
@@ -37,9 +37,9 @@ def _type_shortcut(
             return name_or_instance  # type: ignore[return-value]

         if not (type or kwargs):
-            return cls.get_dsl_class("builtin")(name_or_instance)  # type: ignore
+            return cls.get_dsl_class("builtin")(name_or_instance)  # type: ignore[no-any-return, attr-defined]

-        return cls.get_dsl_class(type, "custom")(  # type: ignore
+        return cls.get_dsl_class(type, "custom")(  # type: ignore[no-any-return, attr-defined]
             name_or_instance, type or "custom", **kwargs
         )

@@ -54,13 +54,13 @@ def __init__(self, filter_name: str, builtin_type: str = "custom", **kwargs: Any

     def to_dict(self) -> Dict[str, Any]:
         # only name to present in lists
-        return self._name  # type: ignore
+        return self._name  # type: ignore[return-value]

     def get_definition(self) -> Dict[str, Any]:
-        d = super().to_dict()  # type: ignore
+        d = super().to_dict()  # type: ignore[misc]
         d = d.pop(self.name)
         d["type"] = self._builtin_type
-        return d  # type: ignore
+        return d  # type: ignore[no-any-return]


 class CustomAnalysisDefinition(CustomAnalysis):

@@ -111,7 +111,7 @@ def __init__(self, name: str):

     def to_dict(self) -> Dict[str, Any]:
         # only name to present in lists
-        return self._name  # type: ignore
+        return self._name  # type: ignore[return-value]


 class Analyzer(AnalysisBase, DslBase):

elasticsearch/dsl/connections.py

Lines changed: 4 additions & 4 deletions
@@ -116,16 +116,16 @@ def get_connection(self, alias: Union[str, _T] = "default") -> _T:
             raise KeyError(f"There is no connection with alias {alias!r}.")

     def _with_user_agent(self, conn: _T) -> _T:
-        from . import __versionstr__  # this is here to avoid circular imports
+        from elasticsearch import (
+            __versionstr__,  # this is here to avoid circular imports
+        )

         # try to inject our user agent
         if hasattr(conn, "_headers"):
             is_frozen = conn._headers.frozen
             if is_frozen:
                 conn._headers = conn._headers.copy()
-            conn._headers.update(
-                {"user-agent": f"elasticsearch-dsl-py/{__versionstr__}"}
-            )
+            conn._headers.update({"user-agent": f"elasticsearch-py/{__versionstr__}"})
             if is_frozen:
                 conn._headers.freeze()
         return conn

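Besides switching the import to the top-level package, this hunk changes the injected user agent from `elasticsearch-dsl-py/...` to `elasticsearch-py/...`, since the DSL package no longer defines its own version (see the `dsl/__init__.py` change above). A small sketch of the resulting header value (illustrative only):

```python
from elasticsearch import __versionstr__

# The DSL code now reuses the top-level package version, so the injected
# header identifies the unified client rather than the old standalone
# elasticsearch-dsl-py distribution.
headers = {"user-agent": f"elasticsearch-py/{__versionstr__}"}
print(headers)
```
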
elasticsearch/dsl/document_base.py

Lines changed: 4 additions & 4 deletions
@@ -36,7 +36,7 @@
 try:
     from types import UnionType
 except ImportError:
-    UnionType = None  # type: ignore
+    UnionType = None  # type: ignore[assignment, misc]

 from typing_extensions import dataclass_transform

@@ -81,14 +81,14 @@ def __init__(self, name: str, field: Field):
     def __getattr__(self, attr: str) -> "InstrumentedField":
         try:
             # first let's see if this is an attribute of this object
-            return super().__getattribute__(attr)  # type: ignore
+            return super().__getattribute__(attr)  # type: ignore[no-any-return]
         except AttributeError:
             try:
                 # next we see if we have a sub-field with this name
                 return InstrumentedField(f"{self._name}.{attr}", self._field[attr])
             except KeyError:
                 # lastly we let the wrapped field resolve this attribute
-                return getattr(self._field, attr)  # type: ignore
+                return getattr(self._field, attr)  # type: ignore[no-any-return]

     def __pos__(self) -> str:
         """Return the field name representation for ascending sort order"""

@@ -226,7 +226,7 @@ def __init__(self, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any]):
                 field_args = [type_]
             elif type_ in self.type_annotation_map:
                 # use best field type for the type hint provided
-                field, field_kwargs = self.type_annotation_map[type_]  # type: ignore
+                field, field_kwargs = self.type_annotation_map[type_]  # type: ignore[assignment]

                 if field:
                     field_kwargs = {

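The first hunk above concerns the fallback for `types.UnionType`, which only exists on Python 3.10 and later. A self-contained sketch of that import-or-fallback pattern (the helper function is made up for illustration):

```python
try:
    # Python 3.10+: the runtime type of PEP 604 unions such as `int | str`.
    from types import UnionType
except ImportError:  # older interpreters
    UnionType = None  # type: ignore[assignment, misc]

def is_pep604_union(hint: object) -> bool:
    """Return True when the annotation is a PEP 604 union like `int | str`."""
    return UnionType is not None and isinstance(hint, UnionType)

print(is_pep604_union(dict))  # False on every Python version
```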