Skip to content

Commit 576e4a5

Browse files
committed
Fix lint
1 parent 959cd79 commit 576e4a5

File tree

3 files changed

+22
-21
lines changed

3 files changed

+22
-21
lines changed

docs/examples/patch_leaf_subclasses/langchain_example.py

Lines changed: 17 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import argparse
22

3-
from langchain_core.language_models import BaseLLM, BaseChatModel
3+
from langchain_core.language_models import BaseChatModel, BaseLLM
44
from langchain_core.language_models.base import BaseLanguageModel
55

66
# important note: if you import these after patching, the patch won't apply!
@@ -14,19 +14,27 @@
1414

1515
def parse_args():
1616
parser = argparse.ArgumentParser(description="LangChain model comparison")
17-
parser.add_argument("--provider", choices=["ollama", "openai", "huggingface"], default="ollama",
18-
help="Choose model provider (default: ollama)")
17+
parser.add_argument(
18+
"--provider",
19+
choices=["ollama", "openai", "huggingface"],
20+
default="ollama",
21+
help="Choose model provider (default: ollama)",
22+
)
1923
parser.add_argument("--model", type=str, help="Specify model name")
20-
parser.add_argument("--prompt", type=str, default="What is the capital of France?",
21-
help="Input prompt")
24+
parser.add_argument(
25+
"--prompt",
26+
type=str,
27+
default="What is the capital of France?",
28+
help="Input prompt",
29+
)
2230

2331
return parser.parse_args()
2432

2533

2634
def chat_with_model(model: BaseLanguageModel, prompt: str) -> str:
2735
try:
2836
response = model.invoke(prompt)
29-
if hasattr(response, 'content'):
37+
if hasattr(response, "content"):
3038
return response.content
3139
else:
3240
return str(response)
@@ -35,24 +43,15 @@ def chat_with_model(model: BaseLanguageModel, prompt: str) -> str:
3543

3644

3745
def create_huggingface_model(model: str = "google/flan-t5-small"):
38-
return HuggingFaceEndpoint(
39-
repo_id=model,
40-
temperature=0.7
41-
)
46+
return HuggingFaceEndpoint(repo_id=model, temperature=0.7)
4247

4348

4449
def create_openai_model(model: str = "gpt-3.5-turbo"):
45-
return ChatOpenAI(
46-
model=model,
47-
temperature=0.7
48-
)
50+
return ChatOpenAI(model=model, temperature=0.7)
4951

5052

5153
def create_ollama_model(model: str = "llama2"):
52-
return OllamaLLM(
53-
model=model,
54-
temperature=0.7
55-
)
54+
return OllamaLLM(model=model, temperature=0.7)
5655

5756

5857
def patch_llm():

docs/examples/patch_leaf_subclasses/simple_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,8 @@ def greet(self):
1919
print("hola")
2020

2121

22-
if __name__ == '__main__':
22+
if __name__ == "__main__":
23+
2324
def my_wrapper(orig_fcn):
2425
def wrapped_fcn(self, *args, **kwargs):
2526
print("wrapper running")
@@ -28,7 +29,6 @@ def wrapped_fcn(self, *args, **kwargs):
2829

2930
return wrapped_fcn
3031

31-
3232
patch_leaf_subclasses(Greeter, "greet", my_wrapper)
3333

3434
EngishGreeter().greet()

opentelemetry-api/src/opentelemetry/util/_patch.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,9 @@ def patch_leaf_subclasses(base_class, method_name, wrapper):
1414
for subclass in leaf_subclasses:
1515
# Patch if the subclass has the method (either defined or inherited)
1616
# and it's actually callable
17-
if hasattr(subclass, method_name) and callable(getattr(subclass, method_name)):
17+
if hasattr(subclass, method_name) and callable(
18+
getattr(subclass, method_name)
19+
):
1820
old_method = getattr(subclass, method_name)
1921
setattr(subclass, method_name, wrapper(old_method))
2022

0 commit comments

Comments (0)