Skip to content

Commit 9069046

Browse files
cbornet and eyurtsev authored
Remove no-untyped-def escapes (#17)
From langchain-ai/langchain#30873 --------- Co-authored-by: Eugene Yurtsev <[email protected]>
1 parent e6aaf25 commit 9069046

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

41 files changed

+420
-184
lines changed

libs/community/langchain_community/chat_models/baichuan.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -575,8 +575,8 @@ def _chat(self, messages: List[BaseMessage], **kwargs: Any) -> requests.Response
575575
)
576576
return res
577577

578-
def _create_payload_parameters( # type: ignore[no-untyped-def]
579-
self, messages: List[BaseMessage], **kwargs
578+
def _create_payload_parameters(
579+
self, messages: List[BaseMessage], **kwargs: Any
580580
) -> Dict[str, Any]:
581581
parameters = {**self._default_params, **kwargs}
582582
temperature = parameters.pop("temperature", 0.3)
@@ -600,7 +600,7 @@ def _create_payload_parameters( # type: ignore[no-untyped-def]
600600

601601
return payload
602602

603-
def _create_headers_parameters(self, **kwargs) -> Dict[str, Any]: # type: ignore[no-untyped-def]
603+
def _create_headers_parameters(self, **kwargs: Any) -> Dict[str, Any]:
604604
parameters = {**self._default_params, **kwargs}
605605
default_headers = parameters.pop("headers", {})
606606
api_key = ""

libs/community/langchain_community/chat_models/minimax.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -439,8 +439,8 @@ def _create_chat_result(self, response: Union[dict, BaseModel]) -> ChatResult:
439439
}
440440
return ChatResult(generations=generations, llm_output=llm_output)
441441

442-
def _create_payload_parameters( # type: ignore[no-untyped-def]
443-
self, messages: List[BaseMessage], is_stream: bool = False, **kwargs
442+
def _create_payload_parameters(
443+
self, messages: List[BaseMessage], is_stream: bool = False, **kwargs: Any
444444
) -> Dict[str, Any]:
445445
"""Create API request body parameters."""
446446
message_dicts = [_convert_message_to_dict(m) for m in messages]

libs/community/langchain_community/document_loaders/chm.py

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
from pathlib import Path
2-
from typing import TYPE_CHECKING, Any, Dict, List, Union
2+
from types import TracebackType
3+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
4+
5+
from typing_extensions import Self
36

47
from langchain_community.document_loaders.unstructured import UnstructuredFileLoader
58

@@ -65,10 +68,15 @@ def __init__(self, path: str):
6568
self.file = chm.CHMFile()
6669
self.file.LoadCHM(path)
6770

68-
def __enter__(self): # type: ignore[no-untyped-def]
71+
def __enter__(self) -> Self:
6972
return self
7073

71-
def __exit__(self, exc_type, exc_value, traceback): # type: ignore[no-untyped-def]
74+
def __exit__(
75+
self,
76+
exc_type: Optional[type[BaseException]],
77+
exc_value: Optional[BaseException],
78+
traceback: Optional[TracebackType],
79+
) -> None:
7280
if self.file:
7381
self.file.CloseCHM()
7482

libs/community/langchain_community/document_loaders/mediawikidump.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,16 @@
11
import logging
22
from pathlib import Path
3-
from typing import Iterator, Optional, Sequence, Union
3+
from typing import TYPE_CHECKING, Iterator, Optional, Sequence, Union
44

55
from langchain_core.documents import Document
66

77
from langchain_community.document_loaders.base import BaseLoader
88

99
logger = logging.getLogger(__name__)
1010

11+
if TYPE_CHECKING:
12+
import mwxml
13+
1114

1215
class MWDumpLoader(BaseLoader):
1316
"""Load `MediaWiki` dump from an `XML` file.
@@ -60,7 +63,7 @@ def __init__(
6063
self.skip_redirects = skip_redirects
6164
self.stop_on_error = stop_on_error
6265

63-
def _load_dump_file(self): # type: ignore[no-untyped-def]
66+
def _load_dump_file(self) -> "mwxml.Dump":
6467
try:
6568
import mwxml
6669
except ImportError as e:
@@ -70,7 +73,7 @@ def _load_dump_file(self): # type: ignore[no-untyped-def]
7073

7174
return mwxml.Dump.from_file(open(self.file_path, encoding=self.encoding))
7275

73-
def _load_single_page_from_dump(self, page) -> Document: # type: ignore[no-untyped-def, return]
76+
def _load_single_page_from_dump(self, page: "mwxml.Page") -> Document: # type: ignore[return]
7477
"""Parse a single page."""
7578
try:
7679
import mwparserfromhell

libs/community/langchain_community/document_loaders/parsers/doc_intelligence.py

Lines changed: 12 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from __future__ import annotations
22

33
import logging
4-
from typing import TYPE_CHECKING, Any, Iterator, List, Optional
4+
from typing import TYPE_CHECKING, Any, Iterator, List, Optional, Union
55

66
from langchain_core.documents import Document
77

@@ -34,20 +34,24 @@ def __init__(
3434

3535
kwargs = {}
3636

37-
if api_key is None and azure_credential is None:
37+
credential: Union[AzureKeyCredential, TokenCredential]
38+
if azure_credential:
39+
if api_key is not None:
40+
raise ValueError(
41+
"Only one of api_key or azure_credential should be provided."
42+
)
43+
credential = azure_credential
44+
elif api_key is not None:
45+
credential = AzureKeyCredential(api_key)
46+
else:
3847
raise ValueError("Either api_key or azure_credential must be provided.")
3948

40-
if api_key and azure_credential:
41-
raise ValueError(
42-
"Only one of api_key or azure_credential should be provided."
43-
)
44-
4549
if api_version is not None:
4650
kwargs["api_version"] = api_version
4751

4852
self.client = DocumentIntelligenceClient(
4953
endpoint=api_endpoint,
50-
credential=azure_credential or AzureKeyCredential(api_key),
54+
credential=credential,
5155
headers={"x-ms-useragent": "langchain-parser/1.0.0"},
5256
**kwargs,
5357
)

libs/community/langchain_community/embeddings/gradient_ai.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -169,5 +169,5 @@ class TinyAsyncGradientEmbeddingClient: #: :meta private:
169169
It might be entirely removed in the future.
170170
"""
171171

172-
def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def]
172+
def __init__(self, *args: Any, **kwargs: Any) -> None:
173173
raise ValueError("Deprecated,TinyAsyncGradientEmbeddingClient was removed.")

libs/community/langchain_community/embeddings/oci_generative_ai.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,13 @@
11
from enum import Enum
2-
from typing import Any, Dict, Iterator, List, Mapping, Optional
2+
from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Mapping, Optional
33

44
from langchain_core.embeddings import Embeddings
55
from langchain_core.utils import pre_init
66
from pydantic import BaseModel, ConfigDict
77

8+
if TYPE_CHECKING:
9+
import oci
10+
811
CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint"
912

1013

@@ -122,12 +125,14 @@ def validate_environment(cls, values: Dict) -> Dict: # pylint: disable=no-self-
122125
client_kwargs.pop("signer", None)
123126
elif values["auth_type"] == OCIAuthType(2).name:
124127

125-
def make_security_token_signer(oci_config): # type: ignore[no-untyped-def]
128+
def make_security_token_signer(
129+
oci_config: dict[str, Any],
130+
) -> "oci.auth.signers.SecurityTokenSigner":
126131
pk = oci.signer.load_private_key_from_file(
127132
oci_config.get("key_file"), None
128133
)
129134
with open(
130-
oci_config.get("security_token_file"), encoding="utf-8"
135+
str(oci_config.get("security_token_file")), encoding="utf-8"
131136
) as f:
132137
st_string = f.read()
133138
return oci.auth.signers.SecurityTokenSigner(st_string, pk)

libs/community/langchain_community/embeddings/yandex.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -159,18 +159,20 @@ def _create_retry_decorator(llm: YandexGPTEmbeddings) -> Callable[[Any], Any]:
159159
)
160160

161161

162-
def _embed_with_retry(llm: YandexGPTEmbeddings, **kwargs: Any) -> Any:
162+
def _embed_with_retry(llm: YandexGPTEmbeddings, **kwargs: Any) -> list[list[float]]:
163163
"""Use tenacity to retry the embedding call."""
164164
retry_decorator = _create_retry_decorator(llm)
165165

166166
@retry_decorator
167-
def _completion_with_retry(**_kwargs: Any) -> Any:
167+
def _completion_with_retry(**_kwargs: Any) -> list[list[float]]:
168168
return _make_request(llm, **_kwargs)
169169

170170
return _completion_with_retry(**kwargs)
171171

172172

173-
def _make_request(self: YandexGPTEmbeddings, texts: List[str], **kwargs): # type: ignore[no-untyped-def]
173+
def _make_request(
174+
self: YandexGPTEmbeddings, texts: List[str], **kwargs: Any
175+
) -> list[list[float]]:
174176
try:
175177
import grpc
176178

libs/community/langchain_community/graphs/ontotext_graphdb_graph.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -93,10 +93,11 @@ def __init__(
9393
self.graph = rdflib.Graph(store, identifier=None, bind_namespaces="none")
9494
self._check_connectivity()
9595

96+
ontology_schema_graph: "rdflib.Graph"
9697
if local_file:
9798
ontology_schema_graph = self._load_ontology_schema_from_file(
9899
local_file,
99-
local_file_format, # type: ignore[arg-type]
100+
local_file_format,
100101
)
101102
else:
102103
self._validate_user_query(query_ontology) # type: ignore[arg-type]
@@ -140,7 +141,9 @@ def _check_connectivity(self) -> None:
140141
)
141142

142143
@staticmethod
143-
def _load_ontology_schema_from_file(local_file: str, local_file_format: str = None): # type: ignore[no-untyped-def, assignment]
144+
def _load_ontology_schema_from_file(
145+
local_file: str, local_file_format: Optional[str] = None
146+
) -> "rdflib.ConjunctiveGraph":
144147
"""
145148
Parse the ontology schema statements from the provided file
146149
"""
@@ -177,7 +180,7 @@ def _validate_user_query(query_ontology: str) -> None:
177180
"Invalid query type. Only CONSTRUCT queries are supported."
178181
)
179182

180-
def _load_ontology_schema_with_query(self, query: str): # type: ignore[no-untyped-def]
183+
def _load_ontology_schema_with_query(self, query: str) -> "rdflib.Graph":
181184
"""
182185
Execute the query for collecting the ontology schema statements
183186
"""
@@ -188,6 +191,9 @@ def _load_ontology_schema_with_query(self, query: str): # type: ignore[no-untyp
188191
except ParserError as e:
189192
raise ValueError(f"Generated SPARQL statement is invalid\n{e}")
190193

194+
if not results.graph:
195+
raise ValueError("Missing graph in results.")
196+
191197
return results.graph
192198

193199
@property

libs/community/langchain_community/graphs/tigergraph_graph.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ def generate_schema(
7777
"""
7878
return self._conn.getSchema(force=True)
7979

80-
def refresh_schema(self): # type: ignore[no-untyped-def]
80+
def refresh_schema(self) -> None:
8181
self.generate_schema()
8282

8383
def query(self, query: str) -> Dict[str, Any]: # type: ignore[override]

0 commit comments

Comments (0)