diff --git a/.env.example b/.env.example
index 15cb666..3e96846 100644
--- a/.env.example
+++ b/.env.example
@@ -1,6 +1,7 @@
 OPENAI_API_KEY=
 ANTHROPIC_API_KEY=
 GEMINI_API_KEY=
+TONGYI_API_KEY=
 
 # Set to false to disable anonymized telemetry
 ANONYMIZED_TELEMETRY=false
diff --git a/README.md b/README.md
index 000f057..7439d61 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@

Tell your MacBook what to do, and it's done—across ANY app.

- Created by Ofir Ozeri ♥️ migrated in collaboration with Magnus and Gregor
+ Created by Ofir Ozeri ♥️ migrated in collaboration with Magnus and Gregor

@@ -37,7 +37,7 @@ Clone first
 git clone https://github.com/browser-use/macOS-use.git && cd macOS-use
 ```
-Don't forget API key
-Supported providers: [OAI](https://platform.openai.com/docs/quickstart), [Anthropic](https://docs.anthropic.com/en/api/admin-api/apikeys/get-api-key) or [Gemini](https://ai.google.dev/gemini-api/docs/api-key) (deepseek R1 coming soon!)
+Don't forget API key
+Supported providers: [OAI](https://platform.openai.com/docs/quickstart), [Anthropic](https://docs.anthropic.com/en/api/admin-api/apikeys/get-api-key), [Gemini](https://ai.google.dev/gemini-api/docs/api-key) or [Tongyi Qwen](https://www.alibabacloud.com/en/product/modelstudio) (deepseek R1 coming soon!)
 At the moment, macOS-use works best with the OAI or Anthropic API, though Gemini is free. It works great too, just not as reliably.
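The `.env.example` and README changes above only document the new key. As a rough illustration of how it would be picked up at runtime, a sketch is shown below; the `load_dotenv()` call is an assumption for illustration, since this diff only shows `os.getenv` being used in `examples/try.py`:

```python
# Sketch only: load the variables listed in .env.example and confirm the newly
# documented Tongyi key is present. Assumes python-dotenv is used to read .env;
# that is not shown in this diff.
import os
from dotenv import load_dotenv

load_dotenv()  # reads OPENAI_API_KEY, ANTHROPIC_API_KEY, GEMINI_API_KEY, TONGYI_API_KEY from .env

if not os.getenv('TONGYI_API_KEY'):
    print("TONGYI_API_KEY is missing; add it to .env before using the Tongyi Qwen provider.")
```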
diff --git a/examples/try.py b/examples/try.py
index 065ff9f..ac30431 100644
--- a/examples/try.py
+++ b/examples/try.py
@@ -18,7 +18,7 @@ def set_llm(llm_provider:str = None):
 
     if not llm_provider:
         raise ValueError("No llm provider was set")
-
+
     if llm_provider == "OAI":
         try:
             api_key = os.getenv('OPENAI_API_KEY')
@@ -26,7 +26,7 @@ def set_llm(llm_provider:str = None):
             print(f"Error while getting API key: {e}")
             api_key = None
         return ChatOpenAI(model='gpt-4o', api_key=SecretStr(api_key))
-
+
     if llm_provider == "google":
         try:
             api_key = os.getenv('GEMINI_API_KEY')
@@ -34,10 +34,17 @@ def set_llm(llm_provider:str = None):
             print(f"Error while getting API key: {e}")
             api_key = None
         return ChatGoogleGenerativeAI(model='gemini-2.0-flash-exp', api_key=SecretStr(api_key))
-
+    if llm_provider == 'qwen':
+        try:
+            api_key = os.getenv('TONGYI_API_KEY')
+        except Exception as e:
+            print(f"Error while getting API key: {e}")
+            api_key = None
+        return ChatOpenAI(base_url='https://dashscope-intl.aliyuncs.com/compatible-mode/v1', model='qwen-plus', api_key=SecretStr(api_key))
+
 llm = set_llm('google')
 llm = set_llm('OAI')
-
+# llm = set_llm('qwen')
 
 controller = Controller()
 task = input("Hi there! What can I do for you today? ")
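Stripped of the surrounding `set_llm()` plumbing, the new `qwen` branch simply points LangChain's OpenAI-compatible client at DashScope's compatible-mode endpoint. A minimal standalone sketch of that call follows; the import paths are assumptions inferred from the classes already used in `examples/try.py`, since the diff does not show the file's import block:

```python
import os

from langchain_openai import ChatOpenAI  # assumed import; matches the ChatOpenAI used in try.py
from pydantic import SecretStr           # assumed import; matches SecretStr(api_key) in try.py

# Qwen is exposed through an OpenAI-compatible API, so the existing ChatOpenAI
# client is reused with a custom base_url and the qwen-plus model.
api_key = os.getenv('TONGYI_API_KEY')  # expects the key added to .env.example above

llm = ChatOpenAI(
    base_url='https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
    model='qwen-plus',
    api_key=SecretStr(api_key),
)
```

To route the example through Qwen, uncomment the `# llm = set_llm('qwen')` line that the patch adds near the bottom of `examples/try.py`.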