3 changes: 2 additions & 1 deletion .env.example
@@ -1,3 +1,4 @@
 OPENAI_API_KEY=your_openai_api_key_here
 ANTHROPIC_API_KEY=your_anthropic_api_key_here
-GOOGLE_API_KEY=your_google_api_key_here
+DEEPSEEK_API_KEY=your_deepseek_api_key_here
+GOOGLE_API_KEY=your_google_api_key_here
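The added DEEPSEEK_API_KEY entry is consumed the same way as the existing keys: copy .env.example to .env, fill in a real key, and let tools/llm_api.py read it with os.getenv (as the hunks below do). A minimal loading sketch; python-dotenv is assumed here only for pulling .env into the environment, and exporting the variable in the shell works just as well:

```python
# Sketch: load .env and read the key introduced by this PR.
# Assumption: python-dotenv is installed; otherwise export DEEPSEEK_API_KEY directly.
import os
from dotenv import load_dotenv

load_dotenv()  # copies the entries from .env into os.environ
api_key = os.getenv("DEEPSEEK_API_KEY")
if not api_key:
    raise ValueError("DEEPSEEK_API_KEY not found in environment variables")
```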
16 changes: 14 additions & 2 deletions tools/llm_api.py
@@ -19,6 +19,14 @@ def create_llm_client(provider="openai"):
         return OpenAI(
             api_key=api_key
         )
+    elif provider == "deepseek":
+        api_key = os.getenv('DEEPSEEK_API_KEY')
+        if not api_key:
+            raise ValueError("DEEPSEEK_API_KEY not found in environment variables")
+        return OpenAI(
+            api_key=api_key,
+            base_url="https://api.deepseek.com/v1",
+        )
     elif provider == "anthropic":
         api_key = os.getenv('ANTHROPIC_API_KEY')
         if not api_key:
@@ -49,14 +57,16 @@ def query_llm(prompt, client=None, model=None, provider="openai"):
     if model is None:
         if provider == "openai":
             model = "gpt-3.5-turbo"
+        elif provider == "deepseek":
+            model = "deepseek-chat"
         elif provider == "anthropic":
             model = "claude-3-sonnet-20240229"
         elif provider == "gemini":
             model = "gemini-pro"
         elif provider == "local":
             model = "Qwen/Qwen2.5-32B-Instruct-AWQ"
 
-    if provider == "openai" or provider == "local":
+    if provider == "openai" or provider == "local" or provider == "deepseek":
         response = client.chat.completions.create(
             model=model,
             messages=[
@@ -85,13 +95,15 @@ def query_llm(prompt, client=None, model=None, provider="openai"):
 def main():
     parser = argparse.ArgumentParser(description='Query an LLM with a prompt')
     parser.add_argument('--prompt', type=str, help='The prompt to send to the LLM', required=True)
-    parser.add_argument('--provider', choices=['openai','anthropic','gemini','local'], default='openai', help='The API provider to use')
+    parser.add_argument('--provider', choices=['openai','anthropic','gemini','local','deepseek'], default='openai', help='The API provider to use')
     parser.add_argument('--model', type=str, help='The model to use (default depends on provider)')
     args = parser.parse_args()
 
     if not args.model:
         if args.provider == 'openai':
             args.model = "gpt-3.5-turbo"
+        elif args.provider == "deepseek":
+            args.model = "deepseek-chat"
         elif args.provider == 'anthropic':
             args.model = "claude-3-5-sonnet-20241022"
         elif args.provider == 'gemini':
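Taken together, the change treats DeepSeek as an OpenAI-compatible endpoint: the client is the OpenAI SDK pointed at https://api.deepseek.com/v1, requests go through the existing chat.completions branch, and deepseek-chat is the default model. A short usage sketch follows; the function names and parameters come from the diff above, while the tools.llm_api import path and the prompt text are assumptions:

```python
# Sketch: exercise the DeepSeek path added in this PR.
# Assumptions: tools/ is importable from the repo root and DEEPSEEK_API_KEY is set.
from tools.llm_api import create_llm_client, query_llm

client = create_llm_client(provider="deepseek")   # OpenAI SDK client with the DeepSeek base_url
answer = query_llm(
    "Reply with a one-line greeting.",            # illustrative prompt
    client=client,
    provider="deepseek",                          # routed through the chat.completions branch
)                                                 # model defaults to "deepseek-chat"
print(answer)
```

The same path is reachable from the CLI flags added above, e.g. `python tools/llm_api.py --provider deepseek --prompt "..."`.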