4 changes: 2 additions & 2 deletions elasticsearch/__init__.py
@@ -27,15 +27,15 @@
 from ._version import __versionstr__

 # Ensure that a compatible version of elastic-transport is installed.
-_version_groups = tuple(int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", _elastic_transport_version).groups())  # type: ignore
+_version_groups = tuple(int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", _elastic_transport_version).groups())  # type: ignore[union-attr]
 if _version_groups < (8, 0, 0) or _version_groups > (9, 0, 0):
     raise ImportError(
         "An incompatible version of elastic-transport is installed. Must be between "
         "v8.0.0 and v9.0.0. Install the correct version with the following command: "
         "$ python -m pip install 'elastic-transport>=8, <9'"
     )

-_version_groups = re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups()  # type: ignore
+_version_groups = re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups()  # type: ignore[assignment, union-attr]
 _major, _minor, _patch = (int(x) for x in _version_groups)
 VERSION = __version__ = (_major, _minor, _patch)
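Note: every change in this diff narrows a bare # type: ignore to the specific mypy error code it must suppress, so unrelated errors on the same line are no longer silenced. A minimal sketch of why [union-attr] is the right code for the lines above, using only the standard library:

    import re

    # re.search() returns Optional[Match[str]]; calling .groups() without a
    # None check makes mypy report an error like:
    #   error: Item "None" of "Optional[Match[str]]" has no attribute "groups"  [union-attr]
    groups = re.search(r"^(\d+)\.(\d+)\.(\d+)", "8.0.0").groups()  # type: ignore[union-attr]

    # The bracketed code suppresses only union-attr; any other error on this
    # line would still be reported, unlike a bare "# type: ignore".
    print(tuple(int(x) for x in groups))  # (8, 0, 0)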
2 changes: 1 addition & 1 deletion elasticsearch/_async/helpers.py
@@ -257,7 +257,7 @@ async def map_actions() -> AsyncIterable[_TYPE_BULK_ACTION_HEADER_AND_BODY]:
         ]
         ok: bool
         info: Dict[str, Any]
-        async for data, (ok, info) in azip(  # type: ignore
+        async for data, (ok, info) in azip(  # type: ignore[assignment, misc]
             bulk_data,
             _process_bulk_chunk(
                 client,
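Coded ignores like the [assignment, misc] above can be enforced project-wide. A hedged sketch, assuming mypy's optional ignore-without-code error code (enabled with --enable-error-code ignore-without-code); the exact message wording varies by mypy version:

    # With ignore-without-code enabled, a bare ignore is itself an error:
    x: int = "oops"  # type: ignore
    # error: "type: ignore" comment without error code  [ignore-without-code]

    # A coded ignore is accepted and silences only the named error:
    y: int = "oops"  # type: ignore[assignment]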
2 changes: 1 addition & 1 deletion elasticsearch/_sync/client/utils.py
@@ -232,7 +232,7 @@ def host_mapping_to_node_config(host: Mapping[str, Union[str, int]]) -> NodeConfig:
     )
     options["path_prefix"] = options.pop("url_prefix")

-    return NodeConfig(**options)  # type: ignore
+    return NodeConfig(**options)  # type: ignore[arg-type]


 def cloud_id_to_node_configs(cloud_id: str) -> List[NodeConfig]:
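For context, [arg-type] fires on the return above because mypy cannot match the mapping's Union[str, int] values against NodeConfig's individually typed parameters. A minimal sketch, assuming elastic-transport's NodeConfig dataclass:

    from typing import Dict, Union

    from elastic_transport import NodeConfig

    options: Dict[str, Union[str, int]] = {"scheme": "https", "host": "localhost", "port": 9200}
    # mypy reports [arg-type]: every **options value is Union[str, int], which
    # is not assignable to parameters declared as plain str or plain int.
    node = NodeConfig(**options)  # type: ignore[arg-type]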
3 changes: 0 additions & 3 deletions elasticsearch/dsl/__init__.py
@@ -104,9 +104,6 @@
 from .utils import AttrDict, AttrList, DslBase
 from .wrappers import Range

-VERSION = (8, 17, 1)
-__version__ = VERSION
-__versionstr__ = ".".join(map(str, VERSION))
 __all__ = [
     "A",
     "Agg",
4 changes: 2 additions & 2 deletions elasticsearch/dsl/_async/document.py
@@ -241,11 +241,11 @@ async def mget(
             error_ids = [doc["_id"] for doc in error_docs]
             message = "Required routing not provided for documents %s."
             message %= ", ".join(error_ids)
-            raise RequestError(400, message, error_docs)  # type: ignore
+            raise RequestError(400, message, error_docs)  # type: ignore[arg-type]
         if missing_docs:
             missing_ids = [doc["_id"] for doc in missing_docs]
             message = f"Documents {', '.join(missing_ids)} not found."
-            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore
+            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore[arg-type]
         return objs

     async def delete(
4 changes: 2 additions & 2 deletions elasticsearch/dsl/_sync/document.py
@@ -235,11 +235,11 @@ def mget(
             error_ids = [doc["_id"] for doc in error_docs]
             message = "Required routing not provided for documents %s."
             message %= ", ".join(error_ids)
-            raise RequestError(400, message, error_docs)  # type: ignore
+            raise RequestError(400, message, error_docs)  # type: ignore[arg-type]
         if missing_docs:
             missing_ids = [doc["_id"] for doc in missing_docs]
             message = f"Documents {', '.join(missing_ids)} not found."
-            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore
+            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore[arg-type]
         return objs

     def delete(
2 changes: 1 addition & 1 deletion elasticsearch/dsl/aggs.py
@@ -193,7 +193,7 @@ def pipeline(
         return cast("Pipeline[_R]", self._agg(False, name, agg_type, *args, **params))

     def result(self, search: "SearchBase[_R]", data: Any) -> AttrDict[Any]:
-        return BucketData(self, search, data)  # type: ignore
+        return BucketData(self, search, data)  # type: ignore[arg-type]


 class Bucket(AggBase[_R], Agg[_R]):
12 changes: 6 additions & 6 deletions elasticsearch/dsl/analysis.py
@@ -37,9 +37,9 @@ def _type_shortcut(
             return name_or_instance  # type: ignore[return-value]

         if not (type or kwargs):
-            return cls.get_dsl_class("builtin")(name_or_instance)  # type: ignore
+            return cls.get_dsl_class("builtin")(name_or_instance)  # type: ignore[no-any-return, attr-defined]

-        return cls.get_dsl_class(type, "custom")(  # type: ignore
+        return cls.get_dsl_class(type, "custom")(  # type: ignore[no-any-return, attr-defined]
             name_or_instance, type or "custom", **kwargs
         )

@@ -54,13 +54,13 @@ def __init__(self, filter_name: str, builtin_type: str = "custom", **kwargs: Any):

     def to_dict(self) -> Dict[str, Any]:
         # only the name is present in lists
-        return self._name  # type: ignore
+        return self._name  # type: ignore[return-value]

     def get_definition(self) -> Dict[str, Any]:
-        d = super().to_dict()  # type: ignore
+        d = super().to_dict()  # type: ignore[misc]
         d = d.pop(self.name)
         d["type"] = self._builtin_type
-        return d  # type: ignore
+        return d  # type: ignore[no-any-return]


 class CustomAnalysisDefinition(CustomAnalysis):

@@ -111,7 +111,7 @@ def __init__(self, name: str):

     def to_dict(self) -> Dict[str, Any]:
         # only the name is present in lists
-        return self._name  # type: ignore
+        return self._name  # type: ignore[return-value]


 class Analyzer(AnalysisBase, DslBase):
8 changes: 4 additions & 4 deletions elasticsearch/dsl/connections.py
@@ -116,16 +116,16 @@ def get_connection(self, alias: Union[str, _T] = "default") -> _T:
         raise KeyError(f"There is no connection with alias {alias!r}.")

     def _with_user_agent(self, conn: _T) -> _T:
-        from . import __versionstr__  # this is here to avoid circular imports
+        from elasticsearch import (
+            __versionstr__,  # this is here to avoid circular imports
+        )

         # try to inject our user agent
         if hasattr(conn, "_headers"):
             is_frozen = conn._headers.frozen
             if is_frozen:
                 conn._headers = conn._headers.copy()
-            conn._headers.update(
-                {"user-agent": f"elasticsearch-dsl-py/{__versionstr__}"}
-            )
+            conn._headers.update({"user-agent": f"elasticsearch-py/{__versionstr__}"})
             if is_frozen:
                 conn._headers.freeze()
         return conn
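The copy/update/freeze sequence above exists because frozen header mappings reject in-place mutation. A hedged usage sketch, assuming elastic_transport.HttpHeaders is the type behind conn._headers:

    from elastic_transport import HttpHeaders

    headers = HttpHeaders({"authorization": "ApiKey ..."})
    headers.freeze()
    assert headers.frozen  # frozen headers raise on mutation

    unfrozen = headers.copy()  # copies are mutable again
    unfrozen["user-agent"] = "elasticsearch-py/8.17.1"
    unfrozen.freeze()  # restore immutability before handing the headers back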
8 changes: 4 additions & 4 deletions elasticsearch/dsl/document_base.py
@@ -36,7 +36,7 @@
 try:
     from types import UnionType
 except ImportError:
-    UnionType = None  # type: ignore
+    UnionType = None  # type: ignore[assignment, misc]

 from typing_extensions import dataclass_transform

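The except-ImportError fallback above targets interpreters older than Python 3.10, where types.UnionType does not exist. A small sketch of what the import provides when it succeeds:

    import sys

    if sys.version_info >= (3, 10):
        from types import UnionType

        # PEP 604 unions such as `int | None` are instances of types.UnionType.
        assert isinstance(int | None, UnionType)
    # On older interpreters the import raises ImportError and the module falls
    # back to UnionType = None, so later isinstance checks simply never match.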
@@ -81,14 +81,14 @@ def __init__(self, name: str, field: Field):
     def __getattr__(self, attr: str) -> "InstrumentedField":
         try:
             # first let's see if this is an attribute of this object
-            return super().__getattribute__(attr)  # type: ignore
+            return super().__getattribute__(attr)  # type: ignore[no-any-return]
         except AttributeError:
             try:
                 # next we see if we have a sub-field with this name
                 return InstrumentedField(f"{self._name}.{attr}", self._field[attr])
             except KeyError:
                 # lastly we let the wrapped field resolve this attribute
-                return getattr(self._field, attr)  # type: ignore
+                return getattr(self._field, attr)  # type: ignore[no-any-return]

     def __pos__(self) -> str:
         """Return the field name representation for ascending sort order"""

@@ -226,7 +226,7 @@ def __init__(self, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any]):
             field_args = [type_]
         elif type_ in self.type_annotation_map:
             # use best field type for the type hint provided
-            field, field_kwargs = self.type_annotation_map[type_]  # type: ignore
+            field, field_kwargs = self.type_annotation_map[type_]  # type: ignore[assignment]

         if field:
             field_kwargs = {
2 changes: 1 addition & 1 deletion elasticsearch/dsl/faceted_search_base.py
@@ -93,7 +93,7 @@ def add_filter(self, filter_values: List[FilterValueType]) -> Optional[Query]:
             f |= self.get_value_filter(v)
         return f

-    def get_value_filter(self, filter_value: FilterValueType) -> Query:  # type: ignore
+    def get_value_filter(self, filter_value: FilterValueType) -> Query:  # type: ignore[empty-body]
         """
         Construct a filter for an individual value
         """
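The [empty-body] code above is mypy's name for a method whose declared return type is not None but whose body is only a docstring; subclasses are expected to override it. A minimal sketch with a hypothetical class name:

    from typing import Any

    class FacetSketch:
        def get_value_filter(self, filter_value: Any) -> dict:  # type: ignore[empty-body]
            """Subclasses override this to build a per-value filter."""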
10 changes: 5 additions & 5 deletions elasticsearch/dsl/field.py
@@ -378,7 +378,7 @@ def clean(self, data: Any) -> Optional[bool]:
         data = self.deserialize(data)
         if data is None and self._required:
             raise ValidationException("Value required for this field.")
-        return data  # type: ignore
+        return data  # type: ignore[no-any-return]


 class Float(Field):

@@ -515,12 +515,12 @@ class Percolator(Field):
     _coerce = True

     def _deserialize(self, data: Any) -> "Query":
-        return Q(data)  # type: ignore
+        return Q(data)  # type: ignore[no-any-return]

     def _serialize(self, data: Any) -> Optional[Dict[str, Any]]:
         if data is None:
             return None
-        return data.to_dict()  # type: ignore
+        return data.to_dict()  # type: ignore[no-any-return]


 class RangeField(Field):

@@ -530,15 +530,15 @@ class RangeField(Field):
     def _deserialize(self, data: Any) -> Range["_SupportsComparison"]:
         if isinstance(data, Range):
             return data
-        data = {k: self._core_field.deserialize(v) for k, v in data.items()}  # type: ignore
+        data = {k: self._core_field.deserialize(v) for k, v in data.items()}  # type: ignore[union-attr]
         return Range(data)

     def _serialize(self, data: Any) -> Optional[Dict[str, Any]]:
         if data is None:
             return None
         if not isinstance(data, collections.abc.Mapping):
             data = data.to_dict()
-        return {k: self._core_field.serialize(v) for k, v in data.items()}  # type: ignore
+        return {k: self._core_field.serialize(v) for k, v in data.items()}  # type: ignore[union-attr]


 class IntegerRange(RangeField):
2 changes: 1 addition & 1 deletion elasticsearch/dsl/mapping_base.py
@@ -64,7 +64,7 @@ def field(self, name: str, *args: Any, **kwargs: Any) -> Self:

     def _collect_fields(self) -> Iterator[Field]:
         """Iterate over all Field objects within, including multi fields."""
-        fields = cast(Dict[str, Field], self.properties.to_dict())  # type: ignore
+        fields = cast(Dict[str, Field], self.properties.to_dict())  # type: ignore[attr-defined]
         for f in fields.values():
             yield f
         # multi fields
Empty file removed: elasticsearch/dsl/py.typed
2 changes: 1 addition & 1 deletion elasticsearch/dsl/query.py
@@ -637,7 +637,7 @@ def __init__(
         functions = []
         for name in ScoreFunction._classes:
             if name in kwargs:
-                functions.append({name: kwargs.pop(name)})  # type: ignore
+                functions.append({name: kwargs.pop(name)})  # type: ignore[arg-type]
         super().__init__(
             boost_mode=boost_mode,
             functions=functions,
6 changes: 3 additions & 3 deletions elasticsearch/dsl/response/__init__.py
@@ -197,9 +197,9 @@ def search_after(self) -> "SearchBase[_R]":
         """
         if len(self.hits) == 0:
             raise ValueError("Cannot use search_after when there are no search results")
-        if not hasattr(self.hits[-1].meta, "sort"):  # type: ignore
+        if not hasattr(self.hits[-1].meta, "sort"):  # type: ignore[attr-defined]
             raise ValueError("Cannot use search_after when results are not sorted")
-        return self._search.extra(search_after=self.hits[-1].meta.sort)  # type: ignore
+        return self._search.extra(search_after=self.hits[-1].meta.sort)  # type: ignore[attr-defined]


 AggregateResponseType = Union[
@@ -293,7 +293,7 @@ def __getitem__(self, attr_name: str) -> AggregateResponseType:
                 AggregateResponseType,
                 agg.result(self._meta["search"], self._d_[attr_name]),
             )
-        return super().__getitem__(attr_name)  # type: ignore
+        return super().__getitem__(attr_name)  # type: ignore[no-any-return]

     def __iter__(self) -> Iterator[AggregateResponseType]:  # type: ignore[override]
         for name in self._meta["aggs"]:
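As the search_after hunk above shows, pagination relies on the last hit's meta.sort values, which exist only on sorted searches. A hedged usage sketch assuming this PR's module layout; the index name "my-index" and sort field "timestamp" are hypothetical:

    from elasticsearch.dsl import Search

    s = Search(index="my-index").sort("timestamp")
    r = s.execute()
    while r.hits:
        for hit in r:
            ...  # process each hit
        # search_after() returns a copy of the query extended with the last
        # hit's sort values, as implemented in the hunk above.
        r = r.search_after().execute()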
4 changes: 2 additions & 2 deletions elasticsearch/dsl/response/aggs.py
@@ -63,7 +63,7 @@ def _wrap_bucket(self, data: Dict[str, Any]) -> Bucket[_R]:
         )

     def __iter__(self) -> Iterator["Agg"]:  # type: ignore[override]
-        return iter(self.buckets)  # type: ignore
+        return iter(self.buckets)  # type: ignore[arg-type]

     def __len__(self) -> int:
         return len(self.buckets)

@@ -83,7 +83,7 @@ def buckets(self) -> Union[AttrDict[Any], AttrList[Any]]:
         if isinstance(bs, list):
             ret = AttrList(bs, obj_wrapper=self._wrap_bucket)
         else:
-            ret = AttrDict[Any]({k: self._wrap_bucket(bs[k]) for k in bs})  # type: ignore
+            ret = AttrDict[Any]({k: self._wrap_bucket(bs[k]) for k in bs})  # type: ignore[assignment]
         super(AttrDict, self).__setattr__("_buckets", ret)
         return self._buckets