Built with Oya AI
You are a specialized stock market analyst assistant focused on analyzing stock prices and market trends to provide clear buy or sell recommendations. You deeply understand stock market indicators, price fluctuations, and investment timing but communicate in a clear and approachable manner accessible to users with basic to intermediate stock knowledge. Your expertise lies in synthesizing publicly available stock pricing data and market signals to deliver actionable trading suggestions while emphasizing risk awareness. You maintain a professional yet friendly tone, helping users make informed decisions without overwhelming them with jargon.
- Mode: skills
- Agent ID: 404778dd-8f11-44bd-a0de-52e875eadc12
- Model: gemini/gemini-2.0-flash
Every deployed agent exposes an OpenAI-compatible API endpoint. Use any SDK or HTTP client that supports the OpenAI chat completions format.
Pass your API key via either header:
Authorization: Bearer a2a_your_key_here
X-API-Key: a2a_your_key_here
Create API keys at https://oya.ai/api-keys.
https://oya.ai/api/v1/chat/completions
# First message — starts a new thread (use the thread_id from the response to continue):
curl -X POST https://oya.ai/api/v1/chat/completions \
-H "Authorization: Bearer a2a_your_key_here" \
-H "Content-Type: application/json" \
-d '{"model":"gemini/gemini-2.0-flash","messages":[{"role":"user","content":"Hello"}]}'
# Continue a conversation using thread_id from the first response:
curl -X POST https://oya.ai/api/v1/chat/completions \
-H "Authorization: Bearer a2a_your_key_here" \
-H "Content-Type: application/json" \
-d '{"model":"gemini/gemini-2.0-flash","messages":[{"role":"user","content":"Follow up"}],"thread_id":"THREAD_ID"}'

from openai import OpenAI
# Point the official OpenAI Python SDK at the Oya-hosted, OpenAI-compatible endpoint.
client = OpenAI(
api_key="a2a_your_key_here",
base_url="https://oya.ai/api/v1",
)
# First message — starts a new thread
response = client.chat.completions.create(
model="gemini/gemini-2.0-flash",
messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)
# Continue the conversation using thread_id
# NOTE(review): thread_id is a non-standard field on the completion response;
# depending on the openai SDK version, extra response fields may only be
# reachable via response.model_extra["thread_id"] — verify attribute access.
thread_id = response.thread_id
response = client.chat.completions.create(
model="gemini/gemini-2.0-flash",
messages=[{"role": "user", "content": "Follow up question"}],
# extra_body merges non-standard request fields into the JSON payload
extra_body={"thread_id": thread_id},
)
print(response.choices[0].message.content)

import OpenAI from "openai";
// Point the official OpenAI Node SDK at the Oya-hosted, OpenAI-compatible endpoint.
const client = new OpenAI({
apiKey: "a2a_your_key_here",
baseURL: "https://oya.ai/api/v1",
});
// First message — starts a new thread
const response = await client.chat.completions.create({
model: "gemini/gemini-2.0-flash",
messages: [{ role: "user", content: "Hello" }],
});
console.log(response.choices[0].message.content);
// Continue the conversation using thread_id
// NOTE(review): thread_id is not part of the SDK's typed response shape, hence
// the `as any` escape hatch — confirm the server returns it at the top level.
const threadId = (response as any).thread_id;
const followUp = await client.chat.completions.create({
model: "gemini/gemini-2.0-flash",
messages: [{ role: "user", content: "Follow up question" }],
// @ts-ignore — custom field
thread_id: threadId,
});
console.log(followUp.choices[0].message.content);

// Package.swift:
// .package(url: "https://github.com/MacPaw/OpenAI.git", from: "0.4.0")
import Foundation
import OpenAI
@main
struct Main {
/// Sends a single chat message to the deployed agent and prints the reply.
static func main() async throws {
    // NOTE(review): this service's endpoint is https://oya.ai/api/v1/chat/completions,
    // but a host-only configuration typically resolves under the library's default
    // base path ("/v1"). Newer MacPaw OpenAI releases expose a `basePath` parameter —
    // verify the request actually reaches /api/v1.
    let config = OpenAI.Configuration(
        token: "a2a_your_key_here",
        host: "oya.ai",
        scheme: "https"
    )
    let client = OpenAI(configuration: config)

    // Single-turn request against the agent's configured model.
    let query = ChatQuery(
        messages: [.user(.init(content: .string("Hello")))],
        model: "gemini/gemini-2.0-flash"
    )

    // The library ships a native async/await overload, so there is no need to
    // bridge the callback API through withCheckedThrowingContinuation by hand.
    let result = try await client.chats(query: query)
    print(result.choices.first?.message.content ?? "")
}
}

// build.gradle.kts dependencies:
// implementation("com.aallam.openai:openai-client:4.0.1")
// implementation("io.ktor:ktor-client-cio:3.0.0")
import com.aallam.openai.api.chat.ChatCompletionRequest
import com.aallam.openai.api.chat.ChatMessage
import com.aallam.openai.api.chat.ChatRole
import com.aallam.openai.api.model.ModelId
import com.aallam.openai.client.OpenAI
import com.aallam.openai.client.OpenAIHost
import kotlinx.coroutines.runBlocking
// Minimal one-shot chat call using the openai-kotlin client against the Oya endpoint.
fun main() = runBlocking {
val openai = OpenAI(
token = "a2a_your_key_here",
// Base URL keeps a trailing slash — presumably so relative request paths
// resolve under /api/v1/; verify against the client's URL-building behavior.
host = OpenAIHost(baseUrl = "https://oya.ai/api/v1/")
)
val completion = openai.chatCompletion(
ChatCompletionRequest(
model = ModelId("gemini/gemini-2.0-flash"),
messages = listOf(ChatMessage(role = ChatRole.User, content = "Hello"))
)
)
// NOTE(review): messageContent is the structured content wrapper; for plain text
// output, message.content (String?) usually prints more cleanly — confirm intent.
println(completion.choices.first().message.messageContent)
}

stream = client.chat.completions.create(
    model="gemini/gemini-2.0-flash",
    messages=[{"role": "user", "content": "Tell me about AI agents"}],
    stream=True,
)
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="", flush=True)

<!-- Oya Chat Widget -->
<script
src="https://oya.ai/widget.js"
data-agent-id="404778dd-8f11-44bd-a0de-52e875eadc12"
data-api-key="a2a_your_key_here"
data-title="Stock Price Analyzer"
></script>

Available models:
gemini/gemini-2.0-flash
gemini/gemini-2.5-flash
gemini/gemini-2.5-pro
gemini/gemini-3-flash-preview
gemini/gemini-3-pro-preview
anthropic/claude-sonnet-4-5-20241022
anthropic/claude-haiku-4-5-20251001
Managed by Oya AI. Do not edit manually — changes are overwritten on each sync.