Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion python/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,8 @@ BOOKING_SAMPLE_CLIENT_SECRET=""
BOOKING_SAMPLE_BUSINESS_ID=""
BOOKING_SAMPLE_SERVICE_ID=""
CREW_AI_ENDPOINT=""
CREW_AI_TOKEN=""
CREW_AI_TOKEN=""
ASTRAFLOW_API_KEY=""
ASTRAFLOW_CN_API_KEY=""
ASTRAFLOW_CHAT_MODEL_ID=""
ASTRAFLOW_EMBEDDING_MODEL_ID=""
78 changes: 78 additions & 0 deletions python/semantic_kernel/connectors/ai/astraflow/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# semantic_kernel.connectors.ai.astraflow

This connector enables integration with **Astraflow** (by UCloud / 优刻得), an OpenAI-compatible AI model aggregation platform that supports 200+ models.

Sign up at https://astraflow.ucloud.cn/

## Endpoints

| Region | Base URL | API key env var |
|--------|----------|-----------------|
| Global (US/CA) | `https://api-us-ca.umodelverse.ai/v1` | `ASTRAFLOW_API_KEY` |
| China | `https://api.modelverse.cn/v1` | `ASTRAFLOW_CN_API_KEY` |

## Quick start

### Initialize the kernel
```python
import semantic_kernel as sk
kernel = sk.Kernel()
```

### Add Astraflow text embedding service
```python
from semantic_kernel.connectors.ai.astraflow import AstraflowTextEmbedding

embedding_service = AstraflowTextEmbedding(
ai_model_id="BAAI/bge-m3", # model ID on the platform
api_key="your-astraflow-api-key", # or set ASTRAFLOW_API_KEY env var
service_id="astraflow-embeddings", # optional
)
kernel.add_service(embedding_service)
```

### Generate embeddings
```python
texts = ["Hello, world!", "Semantic Kernel is awesome"]
embeddings = await kernel.get_service("astraflow-embeddings").generate_embeddings(texts)
```

### Add Astraflow chat completion service
```python
from semantic_kernel.connectors.ai.astraflow import AstraflowChatCompletion

chat_service = AstraflowChatCompletion(
ai_model_id="deepseek-ai/DeepSeek-V3", # any model on the platform
api_key="your-astraflow-api-key", # or set ASTRAFLOW_API_KEY env var
service_id="astraflow-chat", # optional
)
kernel.add_service(chat_service)
```

### Basic chat completion
```python
response = await kernel.invoke_prompt("Hello, how are you?")
```

### China endpoint
```python
chat_cn = AstraflowChatCompletion(
ai_model_id="deepseek-ai/DeepSeek-V3",
base_url="https://api.modelverse.cn/v1",
api_key="your-astraflow-cn-api-key", # or set ASTRAFLOW_CN_API_KEY env var
)
```

### Using with Chat Completion Agent
```python
from semantic_kernel.agents import ChatCompletionAgent
from semantic_kernel.connectors.ai.astraflow import AstraflowChatCompletion

agent = ChatCompletionAgent(
service=AstraflowChatCompletion(),
name="SK-Assistant",
instructions="You are a helpful assistant.",
)
response = await agent.get_response(messages="Write a haiku about Semantic Kernel.")
print(response.content)
```
19 changes: 19 additions & 0 deletions python/semantic_kernel/connectors/ai/astraflow/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Copyright (c) Microsoft. All rights reserved.

"""Public API of the Astraflow connector.

Re-exports the chat completion and text embedding services, their
prompt execution settings classes, and the connector settings, so
callers can import everything from ``semantic_kernel.connectors.ai.astraflow``.
"""

from semantic_kernel.connectors.ai.astraflow.prompt_execution_settings.astraflow_prompt_execution_settings import (
    AstraflowChatPromptExecutionSettings,
    AstraflowEmbeddingPromptExecutionSettings,
    AstraflowPromptExecutionSettings,
)
from semantic_kernel.connectors.ai.astraflow.services.astraflow_chat_completion import AstraflowChatCompletion
from semantic_kernel.connectors.ai.astraflow.services.astraflow_text_embedding import AstraflowTextEmbedding
from semantic_kernel.connectors.ai.astraflow.settings.astraflow_settings import AstraflowSettings

__all__ = [
    "AstraflowChatCompletion",
    "AstraflowChatPromptExecutionSettings",
    "AstraflowEmbeddingPromptExecutionSettings",
    "AstraflowPromptExecutionSettings",
    "AstraflowSettings",
    "AstraflowTextEmbedding",
]
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Copyright (c) Microsoft. All rights reserved.
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
# Copyright (c) Microsoft. All rights reserved.

from typing import Annotated, Any, Literal

from pydantic import BaseModel, Field

from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings


class AstraflowPromptExecutionSettings(PromptExecutionSettings):
    """Base settings for Astraflow prompt execution.

    Empty marker base shared by the chat and embedding settings classes,
    so services can accept either through a single common type.
    """


class AstraflowEmbeddingPromptExecutionSettings(AstraflowPromptExecutionSettings):
    """Settings for Astraflow embedding prompt execution.

    Field names mirror the OpenAI-compatible ``/embeddings`` request body;
    ``ai_model_id`` is serialized under the alias ``model`` for that reason.
    """

    # Text (or list of texts) to embed.
    input: str | list[str] | None = None
    # Serialized as "model" so the payload matches the OpenAI schema.
    ai_model_id: Annotated[str | None, Field(serialization_alias="model")] = None
    encoding_format: Literal["float", "base64"] = "float"
    user: str | None = None
    extra_headers: dict | None = None
    extra_body: dict | None = None
    timeout: float | None = None
    # Requested output dimensionality; must be positive when provided.
    dimensions: Annotated[int | None, Field(gt=0)] = None

    def prepare_settings_dict(self, **kwargs: Any) -> dict[str, Any]:
        """Return a dict of settings ready to pass to the OpenAI embeddings endpoint."""
        # Drop SK-internal fields and any unset (None) values; emit alias names.
        internal_fields = {"service_id", "extension_data", "structured_json_response"}
        return self.model_dump(exclude=internal_fields, exclude_none=True, by_alias=True)


class AstraflowChatPromptExecutionSettings(AstraflowPromptExecutionSettings):
    """Settings for Astraflow chat prompt execution.

    Field names mirror the OpenAI-compatible ``/chat/completions`` request
    body; ``ai_model_id`` is serialized under the alias ``model`` for that
    reason.
    """

    messages: list[dict[str, str]] | None = None
    # Serialized as "model" so the payload matches the OpenAI schema.
    ai_model_id: Annotated[str | None, Field(serialization_alias="model")] = None
    temperature: float | None = None
    top_p: float | None = None
    n: int | None = None
    stream: bool = False
    stop: str | list[str] | None = None
    max_tokens: int | None = None
    presence_penalty: float | None = None
    frequency_penalty: float | None = None
    logit_bias: dict[str, float] | None = None
    user: str | None = None
    tools: list[dict[str, Any]] | None = None
    tool_choice: str | dict[str, Any] | None = None
    response_format: (
        dict[Literal["type"], Literal["text", "json_object"]] | dict[str, Any] | type[BaseModel] | type | None
    ) = None
    seed: int | None = None
    extra_headers: dict | None = None
    extra_body: dict | None = None
    timeout: float | None = None

    def prepare_settings_dict(self, **kwargs: Any) -> dict[str, Any]:
        """Return a dict of settings ready to pass to the OpenAI chat completions endpoint."""
        # Drop SK-internal fields and any unset (None) values; emit alias names.
        # NOTE(review): response_format is excluded here — presumably the
        # service handles structured output separately; verify against callers.
        internal_fields = {"service_id", "extension_data", "structured_json_response", "response_format"}
        return self.model_dump(exclude=internal_fields, exclude_none=True, by_alias=True)
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Copyright (c) Microsoft. All rights reserved.
Loading
Loading