diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..9d253d07 --- /dev/null +++ b/.env.example @@ -0,0 +1,70 @@ +# Frontend +NEXT_PUBLIC_API_URL=http://localhost:8000/api +NEXT_PUBLIC_NETWORK_NAME=sanchonet +NEXT_PUBLIC_MANAGER_BASE_DOMAIN=agent_manager:3001 + +# API and Manager Common +KAFKA_PREFIX=testing +DATABASE_URL=postgresql://root:root@postgres:5432/cardano_autonomous_agent_testing_db +KAFKA_BROKERS=kafka:9093 +AGENT_MNEMONIC= + +# API +DOCS_URL=/api/docs +KAFKA_ENABLED=true +METADATA_BASE_URL=https://metadata.drep.id/api +DB_SYNC_BASE_URL=http://dbsync:9000/api + +# Agent Manager +CLIENT_ID=my-app +CARDANO_NODE_URL=172.31.0.4:3004 +KUBER_BASE_URL=https://sanchonet.kuber.cardanoapi.io +KUBER_API_KEY= +MANAGER_WALLET_ADDRESS= +MANAGER_WALLET_SIGNING_KEY= +FAUCET_API_KEY= +CARDANO_NETWORK_MAGIC=4 +BLOCKFROST_API_KEY= +ENABLE_BLOCKFROST_SUBMIT_API=True +NETWORK_NAME=sanchonet + +# Postgres +POSTGRES_DB=cardano_autonomous_agent_testing_db +POSTGRES_USER=root +POSTGRES_PASSWORD=root + +# DbSync +DBSYNC_PORT=9000 +DBSYNC_CORS_ENABLE=true +DBSYNC_DATABASE_URL= + +# KAFKA +KAFKA_BROKER_ID= 1 +KAFKA_ZOOKEEPER_CONNECT= zookeeper:2181 +KAFKA_LISTENER_SECURITY_PROTOCOL_MAP= DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT +KAFKA_ADVERTISED_LISTENERS= DOCKER_NET://kafka:9093,HOST_NET://localhost:9092 +KAFKA_INTER_BROKER_LISTENER_NAME= DOCKER_NET +KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR= 1 +KAFKA_TRANSACTION_STATE_LOG_MIN_ISR= 1 +KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR= 1 +KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS= 0 +KAFKA_JMX_PORT= 9999 +KAFKA_CREATE_TOPICS= "trigger_config_updates:1:1" + +#zookeeper +ZOOKEEPER_CLIENT_PORT = 2181 +ZOOKEEPER_TICK_TIME = 2000 +ZOOKEEPER_SYNC_LIMIT = 2 + +# Docker network name +DOCKER_NETWORK_NAME=cardano_autonomous_agent + +# Docker network name +DOCKER_NETWORK_NAME=cardano_autonomous_agent + +# Agent +AGENT_NODE_DOCKER_IMAGE_NAME=autonomous_agent + + + + diff --git a/.gitignore b/.gitignore index a6040386..bd55d9c4 
100644 --- a/.gitignore +++ b/.gitignore @@ -3,5 +3,6 @@ app.log .idea/ .vscode/ */**/.vscode/ +.env diff --git a/README.md b/README.md index 81845ba6..ca5d3cb1 100644 --- a/README.md +++ b/README.md @@ -1,33 +1,138 @@ -Cardano Autonomous Agent Monorepo -============ +# Welcome to Autonomous Agent Testing -1. [Backend](api/) -2. [Agent Manager](agent-manager/) -3. [Agent](agent-node/) -4. [Frontend](frontend/) +Autonomous Agent Testing focuses on evaluating features introduced in the Cardano Improvement Proposal (CIP) 1694. This includes testing the creation and voting mechanisms for proposals to ensure the governance model operates seamlessly. Additionally, it verifies functionalities such as: -## Running the stack locally +- Registering and deregistering as a Delegated Representative (DRep). +- Managing stake registrations and deregistrations. +- Performing ADA transfers. +- Triggering operations manually, via a CRON schedule, or through event filtering. -Setup the required dependencies by running the command locally. +The testing process ensures these operations are secure, efficient, and aligned with the decentralized governance objectives of Cardano's Voltaire era. -```shell -docker compose -f docker-compose.local.yml up -d +--- + +## Accessing the Deployed Service + +You can access the deployed service here: [Autonomous Agent Testing](https://agents.cardanoapi.io/) + +--- + +## Running the Stack Locally + +### Directory Structure + +1. **`api`**: Backend service. +2. **`manager`**: Middleware between agents and the backend; handles various agent-related services. +3. **`agent-node`**: Agent responsible for executing various functions. +4. **`frontend`**: User interface for autonomous agent testing. +5. **`dbsync-api`**: Service for interacting with the dbsync database. +--- + +### Using Docker + +Before running the entire service locally using Docker, create `.env` files from `.env.example` and populate them with the necessary environment variables. 
Below are descriptions of key variables: +> **Note:** Some variables in `.env.example` are prepopulated. Keep them as it is or change them carefully. + +#### Changes to be made in `.env` file + +##### API and Manager + +- **`KAFKA_PREFIX`**: Prefix for Kafka topics. +- **`AGENT_MNEMONIC`**: Seed phrase to generate a wallet. + +##### Agent Manager + +- **`KUBER_API_KEY`**: Generate an API key from [KuberIde](https://kuberide.com/kuber/settings/api-keys). +- **`MANAGER_WALLET_ADDRESS`** (Optional): Wallet address with sufficient ADA for transfers. +- **`MANAGER_WALLET_SIGNING_KEY`** (Optional): Signing key for the manager wallet. +- **`FAUCET_API_KEY`** (Optional): API key to load ADA for agent transfers if the manager wallet lacks sufficient funds. +- **`BLOCKFROST_API_KEY`** (Required if `ENABLE_BLOCKFROST_SUBMIT_API` is enabled): Obtain from [Blockfrost](https://blockfrost.io/). + +> **Note:** If `ENABLE_BLOCKFROST_SUBMIT_API` is not enabled, transactions will be submitted using `Kuber`, which may take a few minutes. + +##### DBSync + +- **`DBSYNC_DATABASE_URL`**: URL for the `dbsync-api service`. Default running on `http://localhost:9000` on starting `dbsync-api` service. + +##### Docker Network Name + +- **`DOCKER_NETWORK_NAME`**: Customize the Docker network name (default value provided in `.env.example`). + +##### Agent + +- **`AGENT_NODE_DOCKER_IMAGE_NAME`**: Customize the Docker image name for the agent node. + +#### Running in `Preprod` or `Preview` Networks + +To run in `Preprod` or `Preview` networks, update the following environment variables: + +##### Frontend + +- **`NEXT_PUBLIC_NETWORK_NAME`**: Set to `preview` or `preprod`. 
+ +##### API and Manager + +- **`DB_SYNC_BASE_URL`**: + - `https://preprod-dbync.agents.cardanoapi.io/api` for `preprod` + - `https://preview-dbync.agents.cardanoapi.io/api` for `preview` + +##### Manager Only + +- **`KUBER_BASE_URL`**: + - `https://preview.kuber.cardanoapi.io` for `preview` + - `https://preprod.kuber.cardanoapi.io` for `preprod` +- **`CARDANO_NETWORK_MAGIC`**: + - `3` for `preview` + - `2` for `preprod` +- **`BLOCKFROST_API_KEY`**: Obtain from [Blockfrost](https://blockfrost.io/) for the desired network. +- **`NETWORK_NAME`**: Set to `preprod` or `preview`. + +##### DBSync + +- **`DBSYNC_DATABASE_URL`**: Update the URL and database name accordingly. +--- + +#### Starting the Service + +Run the following command: + +```bash +docker compose -f docker-compose.dev.yml up -d ``` -**Note**: You can also use already existing services in place of this +> **Note:** Ensure no applications are running on ports `3000` and `8000`. + +#### Finally Running the Agent + +1. Visit the frontend at `http://localhost:3000` and connect your wallet. +2. Navigate to the `My Agent` tab in the bottom left to access the `Agents Page`. +3. In the `Overview Tab`, click the `Run Agent` button in the top-right corner of the `Agents Overview Section`. +4. Copy the Docker command and run it in the terminal. Your agent is now ready to operate. + +--- +### Local Setup -The setup guide for each services are in the respective directories: +Each service has its own setup guide within its respective directory. 1. [Backend](api/README.md) 2. [Agent Manager](agent-manager/README.md) 3. [Agent](agent-node/README.md) 4. [Frontend](frontend/README.md) +5. 
[DbSync-Api](dbsync-api/README.md) +**`Note`**: For running all services locally, dependencies like `Kafka` and `PostgreSQL` can be run via Docker using the following command: +```bash +docker compose -f docker-compose.dev.yml up -d +``` + +--- + +## Important -# IMPORTANT +Before committing any changes to the repository, set up the pre-commit hook by running the following command: -Please setup the pre-commit hook before adding any commit for git by running the following command: -```shell +```bash ./install-pre-commit-hook.sh -``` \ No newline at end of file +``` + diff --git a/agent-manager/.env.example b/agent-manager/.env.example index f5f626be..d1a4cbb0 100644 --- a/agent-manager/.env.example +++ b/agent-manager/.env.example @@ -1,34 +1,31 @@ -KAFKA_BROKERS=127.0.0.1:9092 -KAFKA_CLIENT_ID= -KAFKA_PREFIX=local +# Kafka +KAFKA_BROKERS= +KAFKA_TOPIC_PREFIX= +KAFKA_CONSUMER_GROUP= +CLIENT_ID= -CARDANO_NODE_URL= -CARDANO_NETWORK_MAGIC=4 -KUBER_BASE_URL= +# Cardano +CARDANO_NODE_URL=172.31.0.4:3004 +KUBER_BASE_URL='https://sanchonet.kuber.cardanoapi.io' KUBER_API_KEY= +METADATA_BASE_URL='https://metadata.drep.id' +DB_SYNC_BASE_URL= +CARDANO_NETWORK_MAGIC=4 BLOCKFROST_API_KEY= -ENABLE_BLOCKFROST_SUBMIT_API= +ENABLE_BLOCKFROST_SUBMIT_API='True' -DATABASE_URL=postgresql://root:root@localhost:5432/cardano_autonomous_agent_testing_db - +# Wallet MANAGER_WALLET_ADDRESS= MANAGER_WALLET_SIGNING_KEY= - -SANCHONET_FAUCET_API_KEY= - +FAUCET_API_KEY= AGENT_MNEMONIC= -METADATA_BASE_URL= -METADATA_FETCH_BASE_URL= - -DB_SYNC_BASE_URL= - -SERVER_PORT= -NETWORK_NAME= - +# Database +DATABASE_URL= -ELASTIC_APM_SERVER_URL=https://apm.sireto.io -ELASTIC_APM_API_KEY=XXX \ No newline at end of file +# Server +SERVER_PORT=3002 +NETWORK_NAME=sanchonet \ No newline at end of file diff --git a/agent-manager/README.md b/agent-manager/README.md index 13ec4b64..2df5967d 100644 --- a/agent-manager/README.md +++ b/agent-manager/README.md @@ -1,26 +1,25 @@ # Agent Manager Application -This project 
is a TypeScript Agent Manager application where Agents are connected to it through websocket . +This project is a TypeScript Agent Manager application where agents are connected to it through websocket. ## Table of Contents - [Requirements](#requirements) - [Installation](#installation) -- [Usage](#usage) -- [Development](#development) ## Requirements - [Node.js](https://nodejs.org/) (v18.18.0 or higher) - [yarn](https://yarnpkg.com/) package manager +- `kafka service` +- `postgres server` ## Installation -1. Clone the repository: +1. Go to the agent-manager folder (If in root folder) ```shell - git clone https://github.com/sireto/cardano-autonomous-agent.git - cd cardano-autonomous-agent/agent-manager + cd agent-manager ``` 2. Install dependencies using yarn: @@ -31,42 +30,73 @@ This project is a TypeScript Agent Manager application where Agents are connecte ## Usage -Copy the env variables form `.env.example` to `.env` and update the env variables. +Create new file `.env` and copy env variables form `.env.example` to `.env` and update the env variables. -Make sure to run the following command to generate the database client and creating the required table mentioned in schema +#### Setup environment variables -```bash -yarn prisma generate -``` +#### Kafka Configuration -### Development Mode +- **`KAFKA_CONSUMER_GROUP`**: Kafka consumer group name. +- **`CLIENT_ID`**: Unique client ID for Kafka. +- **`KAFKA_PREFIX`**: Prefix for Kafka topics. +- **`KAFKA_BROKERS`**: Kafka broker URL. Specify either a locally running Kafka URL (e.g., on Docker) or a deployed Kafka service URL. -To run the application in dev mode run the following command +#### Cardano Configuration -```shell -yarn dev -``` +- **`CARDANO_NODE_URL`**: `172.31.0.4:3004` - URL for the Cardano node. +- **`KUBER_BASE_URL`**: `'https://sanchonet.kuber.cardanoapi.io'` - Base URL for Kuber's Cardano API. +- **`KUBER_API_KEY`**: API key for accessing Kuber services. 
Generate an API key from [KuberIde](https://kuberide.com/kuber/settings/api-keys). +- **`METADATA_BASE_URL`**: Metadata URL to fetch information about dReps and proposals across different networks. (Default provided in `.env.example`) +- **`DB_SYNC_BASE_URL`**: URL for the `dbsync-api service`. Default running on `http://localhost:9000` on starting `dbsync-api` service. +- **`CARDANO_NETWORK_MAGIC`**: `4` - Network magic for the Cardano testnet(Sanchonet). +- **`BLOCKFROST_API_KEY`** (Optional): API key for accessing the Blockfrost API. (Required if `ENABLE_BLOCKFROST_SUBMIT_API` is enabled): Obtain from [Blockfrost](https://blockfrost.io/). +- **`ENABLE_BLOCKFROST_SUBMIT_API`** (Optional): `'True'` - Enable or disable Blockfrost transaction submission API. + > **Note:** If `ENABLE_BLOCKFROST_SUBMIT_API` is not enabled, transactions will be submitted using `Kuber`, which may take a few minutes. -### Production Mode +#### Wallet Configuration -To run the Agent Manager application, follow these steps: +- **`MANAGER_WALLET_ADDRESS`** (Optional): Wallet address with sufficient ADA for transfers. +- **`MANAGER_WALLET_SIGNING_KEY`** (Optional): Signing key for the manager wallet. +- **`FAUCET_API_KEY`** (Optional): API key to load ADA for agent transfers if the manager wallet lacks sufficient funds. +- **`AGENT_MNEMONIC`**: Seed phrase used to generate a wallet. -1. Build the application using the following command: +#### Database Configuration - ```shell - yarn build - ``` +- **`DATABASE_URL`**: PostgreSQL database URL. Specify either a local Docker-based database or a deployed database URL. - This will compile the TypeScript files into JavaScript and place the output in the `dist` directory. +#### Server Configuration -2. Run the application with an agent ID as a command-line argument: +- **`SERVER_PORT`** (OPTIONAL): `3002` - Port number for the server. (Default port is 3001) +- **`NETWORK_NAME`**: `sanchonet` - Name of the Cardano network. 
- ```shell - yarn start - ``` +After updating environment variables make sure to run the following command to generate the database client and creating the required table mentioned in schema -Make sure your API service is up and running . +```bash +yarn prisma generate +``` -If successful a server listening on port `3000` will be running: +Now finally run the below command to start the manager: + +```bash +yarn dev +``` + +If successful a server listening on mentioned PORT will be running: > http://localhost:3001 + +## Running in `Preprod` or `Preview` Networks + +To run in `Preprod` or `Preview` networks, update the following environment variables: + +- **`DB_SYNC_BASE_URL`**: + - `https://preprod-dbync.agents.cardanoapi.io/api` for `preprod` + - `https://preview-dbync.agents.cardanoapi.io/api` for `preview` +- **`KUBER_BASE_URL`**: + - `https://preview.kuber.cardanoapi.io` for `preview` + - `https://preprod.kuber.cardanoapi.io` for `preprod` +- **`CARDANO_NETWORK_MAGIC`**: + - `3` for `preview` + - `2` for `preprod` +- **`BLOCKFROST_API_KEY`**: Obtain from [Blockfrost](https://blockfrost.io/) for the desired network. +- **`NETWORK_NAME`**: Set to `preprod` or `preview`. 
diff --git a/agent-manager/src/controller/health.ts b/agent-manager/src/controller/health.ts index 873582b2..1a9b7d2e 100644 --- a/agent-manager/src/controller/health.ts +++ b/agent-manager/src/controller/health.ts @@ -58,9 +58,9 @@ async function healthCheck(req: Request, res: Response) { database: { isHealthy: isDatabaseHealthy, }, - metadata:{ - isHealthy:isMetadataHealthy - } + metadata: { + isHealthy: isMetadataHealthy, + }, }, }) } catch (err: any) { diff --git a/agent-node/.env.example b/agent-node/.env.example index 974c8717..02499e2f 100644 --- a/agent-node/.env.example +++ b/agent-node/.env.example @@ -1,4 +1,2 @@ -WS_URL=ws://localhost:3001 -AGENT_SECRET_KEY= -NETWORK= TOKEN= +WS_URL=ws://localhost:3001 \ No newline at end of file diff --git a/agent-node/README.md b/agent-node/README.md index fa913109..b922b77c 100644 --- a/agent-node/README.md +++ b/agent-node/README.md @@ -2,61 +2,36 @@ This project is a TypeScript client application that connects to a server via WebSocket and processes configurations sent by the server. It can schedule and trigger functions based on received configurations. -## Table of Contents - -- [Requirements](#requirements) -- [Installation](#installation) -- [Usage](#usage) -- [Development](#development) - ## Requirements - [Node.js](https://nodejs.org/) (v18.18.0 or higher) - [yarn](https://yarnpkg.com/) package manager +- `Agent-Manager` service +- `Fronted` service +- `Backend` service +- `DbSync-api` service ## Installation -1. Clone the repository: - - ```shell - git clone https://github.com/sireto/cardano-autonomous-agent.git - cd cardano-autonomous-agent/agent-node - ``` - -2. Install dependencies using npm or yarn: +1. Install dependencies using npm or yarn: ```shell yarn install ``` -## Usage +2. Create new file `.env` and copy env variables form `.env.example` to `.env` and update the env variables. 
### Setting up environment variables -Copy environment variables from `.env.example` to `.env` and update them as necessary. +- **`TOKEN`**: Run the frontend and visit `My Agent` tab from left bottom section of the page. Then click `Run Agent` button on top right of the `Agent Overview` section. Copy the token part only and paste it in env. +- **`WS_URL`**: `agent-manager` websocket URL. Default on `ws://localhost:3001` -> **Note**: AGENT_ID if the ID of the agent created with API. +**`Note`** - Remember to add `ws` as protocol in `WS_URL` instead of `http`. -### Development Mode +Copy environment variables from `.env.example` to `.env` and update them as necessary. -To run the application in dev mode run the following command +Finally run the agent by running the command below. ```shell -yarn dev + yarn dev ``` - -### Production Mode - -1. Build the application using the following command: - - ```shell - yarn build - ``` - - This will compile the TypeScript files into JavaScript and place the output in the `dist` directory. - -2.
Run the application with an agent ID as a command-line argument: - - ```shell - yarn start - ``` diff --git a/agent-node/src/functions/proposalNewConstitution.ts b/agent-node/src/functions/proposalNewConstitution.ts index dfe16ac3..81ed8fbb 100644 --- a/agent-node/src/functions/proposalNewConstitution.ts +++ b/agent-node/src/functions/proposalNewConstitution.ts @@ -25,17 +25,17 @@ export default async function handler( return await context.wallet .buildAndSubmit(req, true) .then((v) => v) - .catch(async(e) => { + .catch(async (e) => { if (e.includes('ProposalReturnAccountDoesNotExist')) { await context.builtins.registerStake().catch((e) => { throw e }) return context.wallet - .buildAndSubmit(req) - .then((v) => v) - .catch((e) => { - throw e - }) + .buildAndSubmit(req) + .then((v) => v) + .catch((e) => { + throw e + }) } else { throw e } diff --git a/agent-node/src/functions/treasuryWithdrawal.ts b/agent-node/src/functions/treasuryWithdrawal.ts index 6b5fc787..bf797126 100644 --- a/agent-node/src/functions/treasuryWithdrawal.ts +++ b/agent-node/src/functions/treasuryWithdrawal.ts @@ -33,17 +33,17 @@ export default async function handler( }, ], } - return await context.wallet.buildAndSubmit(req, false).catch(async(e)=>{ + return await context.wallet.buildAndSubmit(req, false).catch(async (e) => { if (e.includes('ProposalReturnAccountDoesNotExist')) { await context.builtins.registerStake().catch((e) => { throw e }) return context.wallet - .buildAndSubmit(req) - .then((v) => v) - .catch((e) => { - throw e - }) + .buildAndSubmit(req) + .then((v) => v) + .catch((e) => { + throw e + }) } else { throw e } diff --git a/agent-node/src/index.ts b/agent-node/src/index.ts index 64f843f2..0e61049e 100644 --- a/agent-node/src/index.ts +++ b/agent-node/src/index.ts @@ -32,10 +32,12 @@ if (token) { } if (!wsUrl) { const network = token.split('_')[0] - - if (network && process.env.MANAGER_BASE_DOMAIN) { + const managerBaseDomain = process.env.MANAGER_BASE_DOMAIN + if (network && 
managerBaseDomain) { // This is set in docker file - wsUrl = `wss://${network.toLowerCase()}.${process.env.MANAGER_BASE_DOMAIN}` + wsUrl = `wss://${network.toLowerCase()}.${managerBaseDomain}` + } else if (managerBaseDomain) { + wsUrl = `ws://${managerBaseDomain}` } else { wsUrl = 'ws://localhost:3001' } diff --git a/agent-node/src/utils/validator.ts b/agent-node/src/utils/validator.ts index fbee26e7..170bebd5 100644 --- a/agent-node/src/utils/validator.ts +++ b/agent-node/src/utils/validator.ts @@ -4,13 +4,12 @@ import { logicalFunctions } from './operatorSupport' const NetworkName = ['preview', 'preprod', 'sanchonet'] export function validateToken(token: string) { - if (token.split('_').length !== 2) { + if (token.split('_').length < 1) { return 'Not a valid token. Missing secret key' } if (token.split('_')[1].includes('undefined')) { return 'Not a valid token. Missing secret key' } - if (!NetworkName.includes(token.split('_')[0])) return 'Not a valid network name' return '' } diff --git a/api/.env.example b/api/.env.example index 1000d661..041a74a3 100644 --- a/api/.env.example +++ b/api/.env.example @@ -1,21 +1,8 @@ -# Environment -# Allowed Values : development , production -APP_ENV=production - DATABASE_URL= -AGENT_MNEMONIC="" +DOCS_URL='/api/docs' KAFKA_BROKERS= +AGENT_MNEMONIC= KAFKA_ENABLED=true -DOCS_URL=/api/docs -OPENAPI_URL=/api/openapi.json +METADATA_BASE_URL='https://metadata.drep.id/api' DB_SYNC_BASE_URL= - -KUBER_URL=localhost -KAFKA_PREFIX= - -JWT_SECRET_KEY="" - -METADATA_API='' -DB_SYNC_API='' - -ELASTIC_APM_SERVER_URL=https://apm.sireto.io +KAFKA_PREFIX= \ No newline at end of file diff --git a/api/README.md b/api/README.md index 331ac6a4..1cf45e81 100644 --- a/api/README.md +++ b/api/README.md @@ -8,39 +8,14 @@ Python version : 3.12.2 Poetry version : 1.8.2 -## Docker - -## Setup Guide - -Clone the project - -```bash - git clone https://github.com/sireto/cardano-autonomous-agent -``` - -Change directory - -```bash - cd cardano-autonomous-agent 
-``` - -Run Docker-Compose . This will setup up the **postgres Database**, **pgadmin4** , **kafka** and **backend** via Docker. - -```bash - docker compose -f "docker-compose.deployment.yml" up --build -d -``` - -After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api services - -## Locally - -## Setup Guide - #### Prerequisites - Python version: `3.12` or higher - Poetry version: `1.8.3` or higher - Pip version: `24.0.0` or higher +- `kafka service` +- `postgres server` + #### Steps @@ -48,9 +23,7 @@ After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api serv > > - Postgres (Required) > -> - Kafka with Zookeeper (Optional) -> -> - Redis (Optional) +> - Kafka (Required)
@@ -65,6 +38,16 @@ After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api serv ```shell poetry shell ``` +3. Check if your virtual env is created using python of version `3.12` or higher +> **Note:** Your terminal should have something like this `(backend-py3.12) ` + - If it is not created using python of version `3.12` or higher then create virtual environment again using command + ```shell + poetry env use 3.12 + ``` + - And finally again use command + ```shell + poetry shell + ``` 3. Install Dependencies @@ -72,7 +55,20 @@ After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api serv poetry install ``` -4. Update the environment variables copying it form `.env.example` to `.env` +4. Make new file `.env` using `.env.example` and update the environments before running the below steps: + +#### Setup environment variables + +- **`KAFKA_PREFIX`**: Prefix for Kafka topics. +- **`AGENT_MNEMONIC`**: Seed phrase to generate a wallet. +- **`DOCS_URL`**: Path for swagger docs +- **`KAFKA_ENABLED`**: To enable kafka (Must be enabled by putting value `true` to run the testing agents) +- **`METADATA_BASE_URL`**: Metadata url to fetch metadata of the drep and proposals of different network. (Default provided in `.env.example`) +- **`DB_SYNC_BASE_URL`**: URL for the `dbsync-api service`. Default running on `http://localhost:9000` on starting `dbsync-api` service. +- **`KAFKA_PREFIX`**: Kafka prefix topic +- **`DATABASE_URL`**: Postgres database url. Specify either a locally running Postgres database URL (e.g., on Docker) or a deployed Postgres database URL. +- **`KAFKA_BROKERS`**: Kafka broker URL. Specify either a locally running Kafka URL (e.g., on Docker) or a deployed Kafka service URL. + 5. 
Run this command for generating the database client and creating the required table mentioned in schema @@ -81,13 +77,7 @@ After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api serv prisma migrate dev ``` -## Running the Server - -Activate Poetry venv inside autonomous-agents-api folder by running the following command. - -```bash - poetry shell -``` +> **Note**: You should always activate virtual environment by using command `poetry shell` before running below command Start the server with env variables. @@ -95,6 +85,6 @@ Start the server with env variables. uvicorn backend.app:get_application --port 8000 --reload --env-file .env ``` -Go to http://localhost:8000 +Go to http://localhost:8000/api/docs -You would see the list of API available +You would see the list of available API diff --git a/api/backend/app/controllers/health.py b/api/backend/app/controllers/health.py index f3ed3347..c20f20d4 100644 --- a/api/backend/app/controllers/health.py +++ b/api/backend/app/controllers/health.py @@ -73,7 +73,7 @@ async def readiness_check(): async def dbsync_health_check(): async with aiohttp.ClientSession() as session: - async with session.get(api_settings.DB_SYNC_API + "/health") as response: + async with session.get(api_settings.DB_SYNC_BASE_URL + "/health") as response: if response.status == 200: return True return False diff --git a/api/backend/app/controllers/internal/metadata_router.py b/api/backend/app/controllers/internal/metadata_router.py index a61541ba..ebf97754 100644 --- a/api/backend/app/controllers/internal/metadata_router.py +++ b/api/backend/app/controllers/internal/metadata_router.py @@ -9,13 +9,13 @@ class MetadataRouter(Routable): def __init__(self): super().__init__() - self.metadata_api = APISettings().METADATA_API + self.metadata_base_url = APISettings().METADATA_BASE_URL @get("/metadata") async def fetch_metadata(self, metadata_url: str): async with aiohttp.ClientSession() as session: async with session.get( - 
f"{self.metadata_api}/metadata?url={metadata_url}&hash=1111111111111111111111111111111111111111111111111111111111111112" + f"{self.metadata_base_url}/metadata?url={metadata_url}&hash=1111111111111111111111111111111111111111111111111111111111111112" ) as resp: response = await resp.json() if resp.ok: diff --git a/api/backend/app/services/agent_service.py b/api/backend/app/services/agent_service.py index 8ad75e1d..d483659e 100644 --- a/api/backend/app/services/agent_service.py +++ b/api/backend/app/services/agent_service.py @@ -205,7 +205,7 @@ async def fetch_data(self, url, session: ClientSession): raise HTTPException(status_code=400, content="Error fetching agent Drep details") async def fetch_balance(self, stake_address: str, session: ClientSession): - async with session.get(f"{api_settings.DB_SYNC_API}/address/balance?address={stake_address}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/address/balance?address={stake_address}") as response: try: return await response.json() except: @@ -215,7 +215,7 @@ async def fetch_balance(self, stake_address: str, session: ClientSession): ) async def fetch_drep_details(self, drep_id: str, session: ClientSession) -> Dict[str, float | bool]: - async with session.get(f"{api_settings.DB_SYNC_API}/drep/{drep_id}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/drep/{drep_id}") as response: try: res = await response.json() voting_power = res.get("votingPower") / (10**6) if res.get("votingPower") else 0 @@ -228,7 +228,7 @@ async def fetch_drep_details(self, drep_id: str, session: ClientSession) -> Dict ) async def fetch_stake_address_details(self, stake_address: str, session: ClientSession): - async with session.get(f"{api_settings.DB_SYNC_API}/stake-address?address={stake_address}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/stake-address?address={stake_address}") as response: try: is_stake_registered = False res = await response.json() @@ -250,7 
+250,7 @@ async def fetch_stake_address_details(self, stake_address: str, session: ClientS ) async def fetch_delegation_details(self, stake_address: str, session: ClientSession): - async with session.get(f"{api_settings.DB_SYNC_API}/delegation?address={stake_address}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/delegation?address={stake_address}") as response: try: res = await response.json() drep_id = res.get("drep", {}).get("drep_id") if res.get("drep") else None diff --git a/api/backend/app/services/drep_service.py b/api/backend/app/services/drep_service.py index b5a961ea..b833989b 100644 --- a/api/backend/app/services/drep_service.py +++ b/api/backend/app/services/drep_service.py @@ -58,7 +58,7 @@ async def fetch_internal_dreps(self, page: int, page_size: int, search: str | No async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], session: ClientSession): drep_dict = {} drep_id = convert_base64_to_hex(agent.wallet_details[0].stake_key_hash) - async with session.get(f"{api_settings.DB_SYNC_API}/drep?search={drep_id}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/drep?search={drep_id}") as response: response_json = await response.json() if response_json["items"]: if drep_id == response_json["items"][0]["drepId"]: @@ -68,7 +68,7 @@ async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], sess metadata_hash = drep_dict.get("metadataHash") try: async with session.get( - f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}" + f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}" ) as metadata_resp: metadata_resp_json = await metadata_resp.json() if "hash" in metadata_resp_json: @@ -84,9 +84,9 @@ async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], sess async def fetch_external_dreps(self, page: int, page_size: int, search: str | None): if search: - fetchUrl = f"{api_settings.DB_SYNC_API}/drep?search={search}" + 
fetchUrl = f"{api_settings.DB_SYNC_BASE_URL}/drep?search={search}" else: - fetchUrl = f"{api_settings.DB_SYNC_API}/drep?page={page}&size={page_size}" + fetchUrl = f"{api_settings.DB_SYNC_BASE_URL}/drep?page={page}&size={page_size}" async with aiohttp.ClientSession() as session: async with session.get(fetchUrl) as response: @@ -129,7 +129,7 @@ async def fetch_metadata_for_drep(self, metadata_hash: str, url: str, drep: Any) try: async with aiohttp.ClientSession() as session: async with session.get( - f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}" + f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}" ) as metadata_resp: metadata_resp_json = await metadata_resp.json() if "hash" in metadata_resp_json: diff --git a/api/backend/app/services/proposal_service.py b/api/backend/app/services/proposal_service.py index 0a523f91..c40d4127 100644 --- a/api/backend/app/services/proposal_service.py +++ b/api/backend/app/services/proposal_service.py @@ -66,7 +66,7 @@ async def get_internal_proposals(self, page: int = 1, pageSize: int = 10, search async def add_metadata_and_agent_detail_in_internal_proposal( self, proposal: TriggerHistoryDto, index: int, results: list[Any] ): - url = f"{api_settings.DB_SYNC_API}/proposal?proposal={proposal.txHash}" + url = f"{api_settings.DB_SYNC_BASE_URL}/proposal?proposal={proposal.txHash}" proposal_data = await self._fetch_proposal_data(url) if not proposal_data: results[index] = "" @@ -83,9 +83,9 @@ async def add_metadata_and_agent_detail_in_internal_proposal( results[index] = proposal_dict async def get_external_proposals(self, page: int, pageSize: int, sort: str, search: str | None = None): - search_url = f"{api_settings.DB_SYNC_API}/proposal?page={page}&size={pageSize}&sort={sort}" + search_url = f"{api_settings.DB_SYNC_BASE_URL}/proposal?page={page}&size={pageSize}&sort={sort}" if search: - search_url = f"{api_settings.DB_SYNC_API}/proposal?proposal={search}" + search_url = 
f"{api_settings.DB_SYNC_BASE_URL}/proposal?proposal={search}" async with aiohttp.ClientSession() as session: async with session.get(search_url) as response: @@ -126,7 +126,7 @@ async def _fetch_metadata(self, metadata_hash: str, url: str, proposal_dict: Any try: async with aiohttp.ClientSession() as session: async with session.get( - f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}" + f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}" ) as metadata_resp: metadata_resp_json = await metadata_resp.json() if "hash" in metadata_resp_json: diff --git a/api/backend/config/api_settings.py b/api/backend/config/api_settings.py index a493baa1..4d5d3670 100644 --- a/api/backend/config/api_settings.py +++ b/api/backend/config/api_settings.py @@ -5,9 +5,9 @@ class APISettings(BaseSettings): APP_ENV: str = "production" SECURE: bool = None JWT_SECRET_KEY: str = "" - DB_SYNC_API: str = "https://dbsyncapi.agents.cardanoapi.io/api" + DB_SYNC_BASE_URL: str = "https://dbsyncapi.agents.cardanoapi.io/api" SAME_SITE = "None" - METADATA_API: str = "https://metadata.drep.id/api" + METADATA_BASE_URL: str = "https://metadata.drep.id/api" GOV_ACTION_API: str = "https://govtool.cardanoapi.io/api" KAFKA_TOPIC_PREFIX: str = "" KAFKA_PREFIX: str = "" diff --git a/dbsync-api/.env.example b/dbsync-api/.env.example index 95abce2b..017d1687 100644 --- a/dbsync-api/.env.example +++ b/dbsync-api/.env.example @@ -1,4 +1,3 @@ -DATABASE_URL=postgres://sudipbhattarai@localhost/dbsync_sanchonet -NODE_ENV=prod +DATABASE_URL= CORS_ENABLE=true -PORT=3001 \ No newline at end of file +PORT=9000 \ No newline at end of file diff --git a/dbsync-api/README.md b/dbsync-api/README.md new file mode 100644 index 00000000..354d34a0 --- /dev/null +++ b/dbsync-api/README.md @@ -0,0 +1,29 @@ +# dbsync-api Service + +The `dbsync-api` service is a Node.js application designed to provide API access to interact with the `dbsync` database. 
This service enables efficient data retrieval for Cardano-related operations. + +## Requirements +Before running the service, ensure the following dependencies are installed: +- PostgreSQL server + +## Installation +1. Run this command for package installation + ```shell + yarn install + ``` +2. Create a new file `.env`, copy the env variables from `.env.example` to `.env`, and update the env variables. +- **`DATABASE_URL`**: PostgreSQL database URL for dbsync_sanchonet. For `preprod` and `preview` update database instance accordingly. +- **`PORT`**: Port for running the server. Default value is 8080 +- **`CORS_ENABLE`**: CORS support for cross-origin requests. + +3. Run the following command to generate the database client and create the required tables mentioned in the schema + ```bash + yarn prisma generate + ``` + +4. Finally, run the command below to start the `dbsync-api` service: + ```bash + yarn dev + ``` + +Now go to `http://localhost:8080/api/docs` to see the list of APIs in `swaggerDocs`. 
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 138ce68e..83b32ac9 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -7,10 +7,16 @@ services: build: context: ./frontend dockerfile: Dockerfile + args: + - NEXT_PUBLIC_MANAGER_BASE_DOMAIN=${NEXT_PUBLIC_MANAGER_BASE_DOMAIN} + - NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} + - NEXT_PUBLIC_NETWORK_NAME=${NEXT_PUBLIC_NETWORK_NAME} + - NEXT_PUBLIC_DOCKER_NETWORK_NAME=${DOCKER_NETWORK_NAME} + - NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME=${AGENT_NODE_DOCKER_IMAGE_NAME} ports: - "3000:3000" - environment: - - "NEXT_PUBLIC_API_ENDPOINT_HOST=http://api.agents.cardanoapi.io/api" + networks: + - autonomous-agent # FastAPI application (build the image) @@ -19,14 +25,20 @@ services: ports: - "8000:8000" environment: - - DATABASE_URL=postgresql://root:root@postgres:5432/cardano_autonomous_agent_testing_db - - KAFKA_BROKERS=kafka:9093 - - DOCS_URL=/api/docs - - OPENAPI_URL=/aio/openapi.json + - DOCS_URL=${DOCS_URL} + - KAFKA_ENABLED=${KAFKA_ENABLED} + - METADATA_BASE_URL=${METADATA_BASE_URL} + - DB_SYNC_BASE_URL=${DB_SYNC_BASE_URL} + - KAFKA_PREFIX=${KAFKA_PREFIX} + - DATABASE_URL=${DATABASE_URL} + - KAFKA_BROKERS=${KAFKA_BROKERS} + - AGENT_MNEMONIC=${AGENT_MNEMONIC} + restart: on-failure:3 depends_on: - postgres - kafka - - pgadmin + networks: + - autonomous-agent # Agent Manager application (build image) agent_manager: @@ -35,92 +47,108 @@ services: dockerfile: Dockerfile ports: - "3001:3001" + restart: on-failure:3 depends_on: - postgres - kafka - api environment: - - DATABASE_URL=postgresql://root:root@postgres:5432/cardano_autonomous_agent_testing_db - - BROKER_URL=kafka:9093 - - CLIENT_ID=my-app - - CARDANO_NODE_URL=95.217.224.100:3006 - - API_URL=http://api:8000 + - CLIENT_ID=${CLIENT_ID} + - CARDANO_NODE_URL=${CARDANO_NODE_URL} + - KUBER_BASE_URL=${KUBER_BASE_URL} + - KUBER_API_KEY=${KUBER_API_KEY} + - MANAGER_WALLET_ADDRESS=${MANAGER_WALLET_ADDRESS} + - 
MANAGER_WALLET_SIGNING_KEY=${MANAGER_WALLET_SIGNING_KEY} + - FAUCET_API_KEY=${FAUCET_API_KEY} + - CARDANO_NETWORK_MAGIC=${CARDANO_NETWORK_MAGIC} + - BLOCKFROST_API_KEY=${BLOCKFROST_API_KEY} + - ENABLE_BLOCKFROST_SUBMIT_API=${ENABLE_BLOCKFROST_SUBMIT_API} + - NETWORK_NAME=${NETWORK_NAME} + - KAFKA_PREFIX=${KAFKA_PREFIX} + - DATABASE_URL=${DATABASE_URL} + - KAFKA_BROKERS=${KAFKA_BROKERS} + - AGENT_MNEMONIC=${AGENT_MNEMONIC} + networks: + - autonomous-agent - #Agent (build image) - agent: +# DbSync + dbsync: build: - context: ./agent-node + context: ./dbsync-api dockerfile: Dockerfile ports: - - "3002:3002" - depends_on: - - agent_manager + - "9000:9000" environment: - - WS_URL= # Use service name as hostname within Docker network - - AGENT_ID= # Provide the agent ID as needed + - PORT=${DBSYNC_PORT} + - CORS_ENABLE=${DBSYNC_CORS_ENABLE} + - DATABASE_URL=${DBSYNC_DATABASE_URL} + + networks: + - autonomous-agent + + agent-node: + build: + context: ./agent-node + dockerfile: Dockerfile + image: ${AGENT_NODE_DOCKER_IMAGE_NAME} #Database postgres: image: postgres:16.2 environment: - POSTGRES_DB: cardano_autonomous_agent_testing_db - POSTGRES_USER: root - POSTGRES_PASSWORD: root + - POSTGRES_DB=${POSTGRES_DB} + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} ports: - "5432:5432" volumes: - pg-data:/var/lib/postgresql/data + networks: + - autonomous-agent - #Pgadmin - pgadmin: - image: dpage/pgadmin4:8.2 - environment: - PGADMIN_DEFAULT_EMAIL: admin@pgadmin.com - PGADMIN_DEFAULT_PASSWORD: password - ports: - - "5050:80" - -#Zookeper + #Zookeper zookeeper: image: confluentinc/cp-zookeeper:7.0.1 + container_name: zookeeper_agent_autonomous ports: - "2181:2181" environment: - ZOOKEEPER_CLIENT_PORT: 2181 - ZOOKEEPER_TICK_TIME: 2000 - ZOOKEEPER_SYNC_LIMIT: 2 + - ZOOKEEPER_CLIENT_PORT=${ZOOKEEPER_CLIENT_PORT} + - ZOOKEEPER_TICK_TIME=${ZOOKEEPER_TICK_TIME} + - ZOOKEEPER_SYNC_LIMIT=${ZOOKEEPER_SYNC_LIMIT} + networks: + - autonomous-agent - #Kafka + 
#Kafka kafka: image: confluentinc/cp-kafka:7.0.1 hostname: kafka + container_name: kafka_agent_autonomous depends_on: - zookeeper ports: - "9092:9092" + - "9093:9093" environment: - KAFKA_BROKER_ID: 1 - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT - KAFKA_ADVERTISED_LISTENERS: DOCKER_NET://kafka:9093,HOST_NET://localhost:9092 - KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER_NET - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 - KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 - KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 - KAFKA_JMX_PORT: 9999 - KAFKA_CREATE_TOPICS: "trigger_config_updates:1:1" + - KAFKA_BROKER_ID=${KAFKA_BROKER_ID} + - KAFKA_ZOOKEEPER_CONNECT=${KAFKA_ZOOKEEPER_CONNECT} + - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=${KAFKA_LISTENER_SECURITY_PROTOCOL_MAP} + - KAFKA_ADVERTISED_LISTENERS=${KAFKA_ADVERTISED_LISTENERS} + - KAFKA_INTER_BROKER_LISTENER_NAME=${KAFKA_INTER_BROKER_LISTENER_NAME} + - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=${KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR} + - KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=${KAFKA_TRANSACTION_STATE_LOG_MIN_ISR} + - KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=${KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR} + - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=${KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS} + - KAFKA_JMX_PORT=${KAFKA_JMX_PORT} + - KAFKA_CREATE_TOPICS=${KAFKA_CREATE_TOPICS} + networks: + - autonomous-agent -#Kafka UI - kafka-ui: - image: provectuslabs/kafka-ui - ports: - - "8080:8080" - restart: always - environment: - - KAFKA_CLUSTERS_0_NAME=local - - KAFKA_CLUSTERS_0_BOOTSTRAP_SERVERS=kafka:9093 +networks: + autonomous-agent: + name: ${DOCKER_NETWORK_NAME} + driver: bridge volumes: pg-data: \ No newline at end of file diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 5d0be260..46475b53 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -17,67 +17,42 @@ services: 
networks: - cardano-autonomous-backend -# #Pgadmin -# pgadmin: -# image: dpage/pgadmin4:8.2 -# container_name: pg-admin -# environment: -# PGADMIN_DEFAULT_EMAIL: admin@pgadmin.com -# PGADMIN_DEFAULT_PASSWORD: password -# ports: -# - "5050:80" -# networks: -# - cardano-autonomous-backend -# -##Zookeper -# zookeeper: -# image: confluentinc/cp-zookeeper:7.0.1 -# container_name: zookeeper_agent_autonomous -# ports: -# - "2181:2181" -# environment: -# ZOOKEEPER_CLIENT_PORT: 2181 -# ZOOKEEPER_TICK_TIME: 2000 -# ZOOKEEPER_SYNC_LIMIT: 2 -# networks: -# - cardano-autonomous-backend -# -# #Kafka -# kafka: -# image: confluentinc/cp-kafka:7.0.1 -# hostname: kafka -# container_name: kafka_agent_autonomous -# depends_on: -# - zookeeper -# ports: -# - "9092:9092" -# environment: -# KAFKA_BROKER_ID: 1 -# KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 -# KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT -# KAFKA_ADVERTISED_LISTENERS: DOCKER_NET://kafka:9093,HOST_NET://localhost:9092 -# KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER_NET -# KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 -# KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 -# KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 -# KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 -# KAFKA_JMX_PORT: 9999 -# KAFKA_CREATE_TOPICS: "trigger_config_updates:1:1" -# networks: -# - cardano-autonomous-backend -# -##Kafka UI -# kafka-ui: -# image: provectuslabs/kafka-ui -# container_name: kafka-ui-agent-autonomous -# ports: -# - "8080:8080" -# restart: always -# environment: -# - KAFKA_CLUSTERS_0_NAME=local -# - KAFKA_CLUSTERS_0_BOOTSTRAP_SERVERS=kafka:9093 -# networks: -# - cardano-autonomous-backend +#Zookeper + zookeeper: + image: confluentinc/cp-zookeeper:7.0.1 + container_name: zookeeper_agent_autonomous + ports: + - "2181:2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + ZOOKEEPER_SYNC_LIMIT: 2 + networks: + - cardano-autonomous-backend + + #Kafka + kafka: + image: confluentinc/cp-kafka:7.0.1 + hostname: 
kafka + container_name: kafka_agent_autonomous + depends_on: + - zookeeper + ports: + - "9092:9092" + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: DOCKER_NET://kafka:9093,HOST_NET://localhost:9092 + KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER_NET + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_JMX_PORT: 9999 + KAFKA_CREATE_TOPICS: "trigger_config_updates:1:1" + networks: + - cardano-autonomous-backend networks: cardano-autonomous-backend: diff --git a/frontend/.env.example b/frontend/.env.example index 46757227..b1b656d4 100644 --- a/frontend/.env.example +++ b/frontend/.env.example @@ -1,4 +1,3 @@ -API_URL = 'http://localhost:8000/api' # internal api url NEXT_PUBLIC_API_URL = # api url accessed from browser -NEXT_PUBLIC_NETWORK_NAME = sanchonet # cardano network on which the paltform is running +NEXT_PUBLIC_NETWORK_NAME = sanchonet # cardano network on which the platform is running NEXT_PUBLIC_ENABLE_AGENT_INSTANCE=false \ No newline at end of file diff --git a/frontend/Dockerfile b/frontend/Dockerfile index e67882c3..78dcdc77 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -17,7 +17,13 @@ COPY ./src ./src COPY ./public ./public COPY next.config.mjs .eslintignore .eslintrc.json postcss.config.js ./next-i18next.config.js ./entrypoint.sh prettier.config.js tsconfig.json tailwind.config.ts ./ -RUN NEXT_PUBLIC_UMAMI_ENABLED=true NEXT_PUBLIC_NODE_ENV=production NEXT_PUBLIC_APM_ENABLED=true yarn build && rm -rf ./.next/cache +ARG NEXT_PUBLIC_MANAGER_BASE_DOMAIN +ARG NEXT_PUBLIC_API_URL +ARG NEXT_PUBLIC_NETWORK_NAME +ARG NEXT_PUBLIC_DOCKER_NETWORK_NAME +ARG NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME + +RUN NEXT_PUBLIC_UMAMI_ENABLED=true NEXT_PUBLIC_NODE_ENV=production 
NEXT_PUBLIC_APM_ENABLED=true NEXT_PUBLIC_API_URL=$NEXT_PUBLIC_API_URL NEXT_PUBLIC_MANAGER_BASE_DOMAIN=$NEXT_PUBLIC_MANAGER_BASE_DOMAIN NEXT_PUBLIC_NETWORK_NAME=$NEXT_PUBLIC_NETWORK_NAME NEXT_PUBLIC_DOCKER_NETWORK_NAME=$NEXT_PUBLIC_DOCKER_NETWORK_NAME NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME=$NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME yarn build && rm -rf ./.next/cache FROM node:18-alpine diff --git a/frontend/README.md b/frontend/README.md index 5ce4a7c6..f600741a 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -1,8 +1,19 @@ -This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). - ## Getting Started -First, run the development server: +First, install the required dependencies for the project using the following command: + +```bash +yarn install +``` + +Once the installation is complete, create a new file `.env`, copy the env variables from `.env.example` to `.env`, and update the env variables. + +- **`NEXT_PUBLIC_NETWORK_NAME`**: Set to `sanchonet` or `preview` or `preprod`. + **`Note`**: It will only display the network type in the web app. You need to update the `agent-manager`, `backend` and `dbsync-api` services to change the behavior of functions in other networks. +- **`NEXT_PUBLIC_ENABLE_AGENT_INSTANCE`**: Enable it by adding `true` to run the `multiple instances of single agent` feature, where the same type of functions will be executed by multiple instances of the agent. To use this feature you also need to increase the instance number from the `Agent Overview` section. +- **`NEXT_PUBLIC_API_URL`**: `Backend` service URL accessed from the browser. + +##### Finally run one of the commands below to start the development server: ```bash npm run dev @@ -14,23 +25,4 @@ pnpm dev bun dev ``` -Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. - -You can start editing the page by modifying `app/page.tsx`. 
The page auto-updates as you edit the file. - -This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font. - -## Learn More - -To learn more about Next.js, take a look at the following resources: - -- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. -- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. - -You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! - -## Deploy on Vercel - -The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. - -Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. +Open [http://localhost:3000](http://localhost:3000) in your browser to view the application. 
diff --git a/frontend/src/app/(pages)/templates/create-template/components/event/RenderEventChildForm.tsx b/frontend/src/app/(pages)/templates/create-template/components/event/RenderEventChildForm.tsx index 0e87639f..c82d96e0 100644 --- a/frontend/src/app/(pages)/templates/create-template/components/event/RenderEventChildForm.tsx +++ b/frontend/src/app/(pages)/templates/create-template/components/event/RenderEventChildForm.tsx @@ -32,14 +32,14 @@ const RenderEventChildForm = ({ }) => { const [localOperator, setLocalOperator] = useState(eventFilterParam.operator || 'eq'); const [value, setValue] = useState(eventFilterParam.value); - const [deleteBtnClicked,setDeleteBtnClicked] = useState(false) + const [deleteBtnClicked, setDeleteBtnClicked] = useState(false); useEffect(() => { if (deleteBtnClicked) { setLocalOperator(eventFilterParam.operator || 'eq'); setValue(eventFilterParam.value); } - setDeleteBtnClicked(false) + setDeleteBtnClicked(false); }, [deleteBtnClicked]); const paramId = Array.isArray(eventFilterParam.id) @@ -77,7 +77,7 @@ const RenderEventChildForm = ({ const handleOnDeleteParam = (paramId: string | string[]) => { onDeleteParameter && onDeleteParameter(paramId); - setDeleteBtnClicked(true) + setDeleteBtnClicked(true); }; const handleInputChange = (e: React.ChangeEvent) => { diff --git a/frontend/src/components/Agent/RunnerTutorial.tsx b/frontend/src/components/Agent/RunnerTutorial.tsx index dd39d896..c9115985 100644 --- a/frontend/src/components/Agent/RunnerTutorial.tsx +++ b/frontend/src/components/Agent/RunnerTutorial.tsx @@ -7,7 +7,9 @@ import environments from '@app/configs/environments'; import { convertToBase64 } from '@app/utils/base64converter'; const AgentRunnerTutorial = ({ agentSecretKey, showToken }: { agentSecretKey: string; showToken?: boolean }) => { - const dockerCommand = `docker run -d --pull always -e TOKEN=${convertToBase64(agentSecretKey)} cardanoapi/autonomous-agents:${environments.NEXT_PUBLIC_IMAGE_TAG}`; + const dockerCommand 
= environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN + ? `docker run -d --network=${environments.NEXT_PUBLIC_DOCKER_NETWORK_NAME || 'autonomous_agent'} -e TOKEN=${convertToBase64(agentSecretKey)} -e MANAGER_BASE_DOMAIN=${environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN} ${environments.NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME || 'cardanoapi/autonomous-agents:Dev'}` + : `docker run -d --pull always -e TOKEN=${convertToBase64(agentSecretKey)} cardanoapi/autonomous-agents:${environments.NEXT_PUBLIC_IMAGE_TAG}`; return (
diff --git a/frontend/src/configs/environments.ts b/frontend/src/configs/environments.ts index 2e3195f0..67171096 100644 --- a/frontend/src/configs/environments.ts +++ b/frontend/src/configs/environments.ts @@ -68,7 +68,10 @@ export const environments = { APM_ENABLED: process.env.ELASTIC_APM_SERVER_URL && process.env.ELASTIC_APM_SERVICE_NAME, NEXT_PUBLIC_IMAGE_TAG: process.env.NEXT_PUBLIC_IMAGE_TAG ?? 'dev', GOVTOOL_BASE_URL: 'https://govtool.cardanoapi.io', - NEXT_PUBLIC_ENABLE_AGENT_INSTANCE: process.env.NEXT_PUBLIC_ENABLE_AGENT_INSTANCE === 'true' || false + NEXT_PUBLIC_ENABLE_AGENT_INSTANCE: process.env.NEXT_PUBLIC_ENABLE_AGENT_INSTANCE === 'true' || false, + NEXT_PUBLIC_MANAGER_BASE_DOMAIN: process.env.NEXT_PUBLIC_MANAGER_BASE_DOMAIN || '', + NEXT_PUBLIC_DOCKER_NETWORK_NAME: process.env.NEXT_PUBLIC_DOCKER_NETWORK_NAME || '', + NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME: process.env.NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME || '' }; if (typeof window !== 'undefined') { diff --git a/frontend/src/utils/base64converter.ts b/frontend/src/utils/base64converter.ts index 1e25b4d8..64396b63 100644 --- a/frontend/src/utils/base64converter.ts +++ b/frontend/src/utils/base64converter.ts @@ -1,7 +1,9 @@ import environments from '@app/configs/environments'; export function convertToBase64(agentSecretKey: string) { - const newSecretKey = environments.network + '_' + agentSecretKey; + const newSecretKey = environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN + ? '_' + agentSecretKey + : environments.network + '_' + agentSecretKey; const buffer = new Buffer(newSecretKey); return buffer.toString('base64'); }