diff --git a/workflows/.env.example b/workflows/.env.example
index 3d6a3f43..b93375a3 100644
--- a/workflows/.env.example
+++ b/workflows/.env.example
@@ -1,2 +1,3 @@
-# We support all langchain models, openai only for demo purposes
-OPENAI_API_KEY=
\ No newline at end of file
+# We support all openrouter models and langchain openai for demo purposes
+OPENAI_API_KEY=
+OPENROUTER_API_KEY=
\ No newline at end of file
diff --git a/workflows/cli.py b/workflows/cli.py
index e442eb4d..dbce85fe 100644
--- a/workflows/cli.py
+++ b/workflows/cli.py
@@ -14,4 +14,5 @@
 from workflow_use.builder.service import BuilderService
 from workflow_use.controller.service import WorkflowController
+from workflow_use.llm.openrouter import ChatOpenRouter
 from workflow_use.recorder.service import RecordingService  # Added import
 from workflow_use.workflow.service import Workflow
@@ -29,13 +30,21 @@
 # Default LLM instance to None
 llm_instance = None
+
 try:
-	llm_instance = ChatOpenAI(model='gpt-4o')
+	# Prefer OpenRouter when its key is set; otherwise fall back to OpenAI.
+	if os.getenv('OPENROUTER_API_KEY'):
+		llm_instance = ChatOpenRouter(model='openai/gpt-4o')
+	elif os.getenv('OPENAI_API_KEY'):
+		llm_instance = ChatOpenAI(model='gpt-4o')
 except Exception as e:
-	typer.secho(f'Error initializing LLM: {e}. Would you like to set your OPENAI_API_KEY?', fg=typer.colors.RED)
-	set_openai_api_key = input('Set OPENAI_API_KEY? (y/n): ')
-	if set_openai_api_key.lower() == 'y':
-		os.environ['OPENAI_API_KEY'] = input('Enter your OPENAI_API_KEY: ')
-		llm_instance = ChatOpenAI(model='gpt-4o')
+	typer.secho(f'Error initializing LLM: {e}', fg=typer.colors.RED)
+	raise typer.Exit(code=1)
+
+if llm_instance is None:
+	typer.secho(
+		'No LLM API key found. Set OPENROUTER_API_KEY or OPENAI_API_KEY.',
+		fg=typer.colors.RED,
+	)
+	raise typer.Exit(code=1)
 
 builder_service = BuilderService(llm=llm_instance) if llm_instance else None
 # recorder_service = RecorderService()  # Placeholder
diff --git a/workflows/workflow_use/llm/openrouter.py b/workflows/workflow_use/llm/openrouter.py
new file mode 100644
index 00000000..37506ecf
--- /dev/null
+++ b/workflows/workflow_use/llm/openrouter.py
@@ -0,0 +1,22 @@
+import os
+
+from langchain_openai import ChatOpenAI
+from pydantic import SecretStr
+
+# Adds support for LLM integration via OpenRouter (OpenAI-compatible API).
+class ChatOpenRouter(ChatOpenAI):
+	def __init__(
+		self,
+		model: str,
+		openai_api_key: str | None = None,
+		openai_api_base: str = 'https://openrouter.ai/api/v1',
+		**kwargs,
+	):
+		# Fall back to the OPENROUTER_API_KEY env var so callers need not pass a key.
+		key = openai_api_key or os.getenv('OPENROUTER_API_KEY')
+		super().__init__(
+			openai_api_base=openai_api_base,
+			openai_api_key=SecretStr(key) if key else None,
+			model_name=model,
+			**kwargs,
+		)