From 737887c6bf7d141fa5c7a03ada26827328377c2e Mon Sep 17 00:00:00 2001 From: Sital Nagarkoti Date: Wed, 8 Jan 2025 15:11:44 +0545 Subject: [PATCH 1/5] fix: Update docker-compose.dev.yml --- README.md | 26 ++++++++++++++++------ docker-compose.dev.yml | 50 ++++++++++++++++++++++++++++-------------- frontend/.env.example | 2 +- 3 files changed, 53 insertions(+), 25 deletions(-) diff --git a/README.md b/README.md index 81845ba6b..46e0719bd 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,29 @@ -Cardano Autonomous Agent Monorepo -============ +### Welcome to Autonomous Agent Testing +Autonomous Agent Testing primarily focuses on evaluating the features introduced in the Cardano Improvement Proposal (CIP) 1694. +This includes testing the creation and voting mechanisms for proposals to ensure the governance model operates seamlessly. +Additionally, it verifies functionalities like registering and deregistering as a Delegated Representative (DRep), +managing stake registrations and deregistrations, and performing ADA transfers. It also provides the feature to trigger these function either +Manually or by setting a CRON schedule or by event filtering The testing process ensures these operations are secure, efficient, +and align with the decentralized governance objectives of Cardano's Voltaire era. -1. [Backend](api/) -2. [Agent Manager](agent-manager/) -3. [Agent](agent-node/) -4. [Frontend](frontend/) +## Running the deployed service +[Autonomous Agent Testing](https://agents.cardanoapi.io/) ## Running the stack locally +### Directory Structure +1. `api`: the backend service +2. `manager`: Middleman between agents and backend. Also handles different services for agent +3. `agent-node`: agent for handling various functions +4. `frontend`: UI for autonomous agent testing +5. `dbsync-api`: handling services related to dbsync database + +### Using Docker + Setup the required dependencies by running the command locally. 
```shell -docker compose -f docker-compose.local.yml up -d +docker compose -f docker-compose.dev.yml up -d ``` **Note**: You can also use already existing services in place of this diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 138ce68e1..63929dc1a 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -10,7 +10,8 @@ services: ports: - "3000:3000" environment: - - "NEXT_PUBLIC_API_ENDPOINT_HOST=http://api.agents.cardanoapi.io/api" + - NEXT_PUBLIC_API_ENDPOINT_HOST=http://api:8000/api + - NEXT_PUBLIC_NETWORK_NAME=sanchonet # FastAPI application (build the image) @@ -22,7 +23,10 @@ services: - DATABASE_URL=postgresql://root:root@postgres:5432/cardano_autonomous_agent_testing_db - KAFKA_BROKERS=kafka:9093 - DOCS_URL=/api/docs - - OPENAPI_URL=/aio/openapi.json + - AGENT_MNEMONIC='group shadow belt culture garage sport demand target twist tribe milk dumb divide mango bench fantasy okay unfair error twice struggle naive squirrel romance' + - KAFKA_ENABLED='True' + - METADATA_API='https://metadata.drep.id/api' + - DB_SYNC_API='https://dbsyncapi.agents.cardanoapi.io/api' depends_on: - postgres - kafka @@ -41,23 +45,35 @@ services: - api environment: - DATABASE_URL=postgresql://root:root@postgres:5432/cardano_autonomous_agent_testing_db - - BROKER_URL=kafka:9093 + - KAFKA_BROKERS=kafka:9093 - CLIENT_ID=my-app - - CARDANO_NODE_URL=95.217.224.100:3006 - - API_URL=http://api:8000 + - CARDANO_NODE_URL=172.31.0.4:3004 + - KUBER_BASE_URL='https://sanchonet.kuber.cardanoapi.io' + - KUBER_API_KEY='l3HSdpCx86BOZFEvHspmYZ8KiVOnnvJDLKkC7JMVKUu9pZlV0Ld0qbn3pTQ3ER' + - MANAGER_WALLET_ADDRESS=addr_test1qrd3hs7rlxwwdzthe6hj026dmyt3y0heuulctscyydh2kguh4xfmpjqkd25vfq69hcvj27jqyk4hvnyxu7vma2c4kvps8eh2m3 + - MANAGER_WALLET_SIGNING_KEY=addr_sk14wrrctnv9cyr05vjnwrjcs7mujzuxf4zj5nm7fna5nx7fxrnrupqwhwycx + - FAUCET_API_KEY=ayEO6dlVF18oTslr9eIMUNgE35GqVfFz + - AGENT_MNEMONIC='group shadow belt culture garage sport demand target twist tribe milk dumb divide mango 
bench fantasy okay unfair error twice struggle naive squirrel romance' + - METADATA_BASE_URL='https://metadata.drep.id' + - DB_SYNC_BASE_URL='https://dbsyncapi.agents.cardanoapi.io/api' + - CARDANO_NETWORK_MAGIC=4 + - BLOCKFROST_API_KEY='sanchonetWC2xdJbuASgECPHevRkMkh6QQqJf2nve' + - ENABLE_BLOCKFROST_SUBMIT_API='True' + - SERVER_PORT=3002 + - NETWORK_NAME='sanchonet' - #Agent (build image) - agent: - build: - context: ./agent-node - dockerfile: Dockerfile - ports: - - "3002:3002" - depends_on: - - agent_manager - environment: - - WS_URL= # Use service name as hostname within Docker network - - AGENT_ID= # Provide the agent ID as needed +# #Agent (build image) +# agent: +# build: +# context: ./agent-node +# dockerfile: Dockerfile +# ports: +# - "3002:3002" +# depends_on: +# - agent_manager +# environment: +# - WS_URL= # Use service name as hostname within Docker network +# - AGENT_ID= # Provide the agent ID as needed #Database postgres: diff --git a/frontend/.env.example b/frontend/.env.example index 467572271..548323932 100644 --- a/frontend/.env.example +++ b/frontend/.env.example @@ -1,4 +1,4 @@ API_URL = 'http://localhost:8000/api' # internal api url NEXT_PUBLIC_API_URL = # api url accessed from browser -NEXT_PUBLIC_NETWORK_NAME = sanchonet # cardano network on which the paltform is running +NEXT_PUBLIC_NETWORK_NAME = sanchonet # cardano network on which the platform is running NEXT_PUBLIC_ENABLE_AGENT_INSTANCE=false \ No newline at end of file From ba6133a72d8cb34a7a030af78183e1d6612aa7e5 Mon Sep 17 00:00:00 2001 From: Sital999 Date: Wed, 8 Jan 2025 16:11:34 +0545 Subject: [PATCH 2/5] fix: Update docker compose for readme --- README.md | 57 ++++++++- api/.env.example | 4 +- api/backend/app/controllers/health.py | 2 +- .../controllers/internal/metadata_router.py | 4 +- api/backend/app/services/agent_service.py | 8 +- api/backend/app/services/drep_service.py | 10 +- api/backend/app/services/proposal_service.py | 8 +- api/backend/config/api_settings.py | 4 +- 
docker-compose.dev.yml | 26 +++- docker-compose.local.yml | 120 +++++++++--------- .../components/event/RenderEventChildForm.tsx | 6 +- 11 files changed, 158 insertions(+), 91 deletions(-) diff --git a/README.md b/README.md index 46e0719bd..4336cb0c7 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,62 @@ and align with the decentralized governance objectives of Cardano's Voltaire era ### Using Docker -Setup the required dependencies by running the command locally. +Before running whole service locally using docker you need to add few of the environment variables +on file `docker-compose.dev.yml` + +##### api +- AGENT_MNEMONIC + - Add seed phrase to generate wallet + +##### agent_manager +- KUBER_API_KEY + - Visit [KuberIde](https://kuberide.com/kuber/settings/api-keys) and generate api-key +- MANAGER_WALLET_ADDRESS (OPTIONAL) +- MANAGER_WALLET_SIGNING_KEY (OPTIONAL) + - Add a wallet address having sufficient ADA so that it can be used to transfer ADA to agent when requested +- FAUCET_API_KEY (OPTIONAL) + - Add faucet api key to load ADA which will be used to transfer ADA to agents as per request. And it will only be used if the provided `MANAGER_WALLET_ADDRESS` doesnot have sufficient ADA. +- AGENT_MNEMONIC + - Add seed phrase to generate wallet that should be same as added in `api` +- BLOCKFROST_API_KEY (Required if ENABLE_BLOCKFROST_SUBMIT_API is 'True' or enabled) + - Visit [Blockfrost](https://blockfrost.io/) and sign up and generate api key + +Note: environment variable `ENABLE_BLOCKFROST_SUBMIT_API` is preferred as if it is not enabled then `Kuber` will be used to submit the transaction which might take couple of minutes. 
+ +##### dbsync +- DATABASE_URL + - Add database url of dbsync + +Furthermore all env are setup to run in `Sanchonet` so if you want to run in `Preprod` or `Preview` +Network then following environment variables are to be updated: + +##### frontend +- NEXT_PUBLIC_NETWORK_NAME + - preview or preprod + +##### api and manager +- DB_SYNC_BASE_URL + - https://preprod-dbync.agents.cardanoapi.io/api for `preprod` + - https://preview-dbync.agents.cardanoapi.io/api for `preview` + +##### manager only +- KUBER_BASE_URL + - https://preview.kuber.cardanoapi.io for `preview` + - https://preprod.kuber.cardanoapi.io for `preprod` + +- CARDANO_NETWORK_MAGIC + - 3 for `preview` + - 2 for `preprod` + +- BLOCKFROST_API_KEY + - Visit [Blockfrost](https://blockfrost.io/) and sign up and generate api key based on desired network type + +- NETWORK_NAME + - preprod or preview + +##### dbsync +- DATABASE_URL + - Update the dbsync database url and database name accordingly ```shell docker compose -f docker-compose.dev.yml up -d diff --git a/api/.env.example b/api/.env.example index 1000d6612..a3430102d 100644 --- a/api/.env.example +++ b/api/.env.example @@ -15,7 +15,7 @@ KAFKA_PREFIX= JWT_SECRET_KEY="" -METADATA_API='' -DB_SYNC_API='' +METADATA_BASE_URL='' +DB_SYNC_BASE_URL='' ELASTIC_APM_SERVER_URL=https://apm.sireto.io diff --git a/api/backend/app/controllers/health.py b/api/backend/app/controllers/health.py index f3ed33477..c20f20d4e 100644 --- a/api/backend/app/controllers/health.py +++ b/api/backend/app/controllers/health.py @@ -73,7 +73,7 @@ async def readiness_check(): async def dbsync_health_check(): async with aiohttp.ClientSession() as session: - async with session.get(api_settings.DB_SYNC_API + "/health") as response: + async with session.get(api_settings.DB_SYNC_BASE_URL + "/health") as response: if response.status == 200: return True return False diff --git a/api/backend/app/controllers/internal/metadata_router.py b/api/backend/app/controllers/internal/metadata_router.py index 
a61541ba6..ebf97754e 100644 --- a/api/backend/app/controllers/internal/metadata_router.py +++ b/api/backend/app/controllers/internal/metadata_router.py @@ -9,13 +9,13 @@ class MetadataRouter(Routable): def __init__(self): super().__init__() - self.metadata_api = APISettings().METADATA_API + self.metadata_base_url = APISettings().METADATA_BASE_URL @get("/metadata") async def fetch_metadata(self, metadata_url: str): async with aiohttp.ClientSession() as session: async with session.get( - f"{self.metadata_api}/metadata?url={metadata_url}&hash=1111111111111111111111111111111111111111111111111111111111111112" + f"{self.metadata_base_url}/metadata?url={metadata_url}&hash=1111111111111111111111111111111111111111111111111111111111111112" ) as resp: response = await resp.json() if resp.ok: diff --git a/api/backend/app/services/agent_service.py b/api/backend/app/services/agent_service.py index 8ad75e1dd..d483659e6 100644 --- a/api/backend/app/services/agent_service.py +++ b/api/backend/app/services/agent_service.py @@ -205,7 +205,7 @@ async def fetch_data(self, url, session: ClientSession): raise HTTPException(status_code=400, content="Error fetching agent Drep details") async def fetch_balance(self, stake_address: str, session: ClientSession): - async with session.get(f"{api_settings.DB_SYNC_API}/address/balance?address={stake_address}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/address/balance?address={stake_address}") as response: try: return await response.json() except: @@ -215,7 +215,7 @@ async def fetch_balance(self, stake_address: str, session: ClientSession): ) async def fetch_drep_details(self, drep_id: str, session: ClientSession) -> Dict[str, float | bool]: - async with session.get(f"{api_settings.DB_SYNC_API}/drep/{drep_id}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/drep/{drep_id}") as response: try: res = await response.json() voting_power = res.get("votingPower") / (10**6) if res.get("votingPower") 
else 0 @@ -228,7 +228,7 @@ async def fetch_drep_details(self, drep_id: str, session: ClientSession) -> Dict ) async def fetch_stake_address_details(self, stake_address: str, session: ClientSession): - async with session.get(f"{api_settings.DB_SYNC_API}/stake-address?address={stake_address}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/stake-address?address={stake_address}") as response: try: is_stake_registered = False res = await response.json() @@ -250,7 +250,7 @@ async def fetch_stake_address_details(self, stake_address: str, session: ClientS ) async def fetch_delegation_details(self, stake_address: str, session: ClientSession): - async with session.get(f"{api_settings.DB_SYNC_API}/delegation?address={stake_address}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/delegation?address={stake_address}") as response: try: res = await response.json() drep_id = res.get("drep", {}).get("drep_id") if res.get("drep") else None diff --git a/api/backend/app/services/drep_service.py b/api/backend/app/services/drep_service.py index b5a961ea5..b833989b7 100644 --- a/api/backend/app/services/drep_service.py +++ b/api/backend/app/services/drep_service.py @@ -58,7 +58,7 @@ async def fetch_internal_dreps(self, page: int, page_size: int, search: str | No async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], session: ClientSession): drep_dict = {} drep_id = convert_base64_to_hex(agent.wallet_details[0].stake_key_hash) - async with session.get(f"{api_settings.DB_SYNC_API}/drep?search={drep_id}") as response: + async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/drep?search={drep_id}") as response: response_json = await response.json() if response_json["items"]: if drep_id == response_json["items"][0]["drepId"]: @@ -68,7 +68,7 @@ async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], sess metadata_hash = drep_dict.get("metadataHash") try: async with session.get( - 
f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}" + f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}" ) as metadata_resp: metadata_resp_json = await metadata_resp.json() if "hash" in metadata_resp_json: @@ -84,9 +84,9 @@ async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], sess async def fetch_external_dreps(self, page: int, page_size: int, search: str | None): if search: - fetchUrl = f"{api_settings.DB_SYNC_API}/drep?search={search}" + fetchUrl = f"{api_settings.DB_SYNC_BASE_URL}/drep?search={search}" else: - fetchUrl = f"{api_settings.DB_SYNC_API}/drep?page={page}&size={page_size}" + fetchUrl = f"{api_settings.DB_SYNC_BASE_URL}/drep?page={page}&size={page_size}" async with aiohttp.ClientSession() as session: async with session.get(fetchUrl) as response: @@ -129,7 +129,7 @@ async def fetch_metadata_for_drep(self, metadata_hash: str, url: str, drep: Any) try: async with aiohttp.ClientSession() as session: async with session.get( - f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}" + f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}" ) as metadata_resp: metadata_resp_json = await metadata_resp.json() if "hash" in metadata_resp_json: diff --git a/api/backend/app/services/proposal_service.py b/api/backend/app/services/proposal_service.py index 0a523f91f..c40d41273 100644 --- a/api/backend/app/services/proposal_service.py +++ b/api/backend/app/services/proposal_service.py @@ -66,7 +66,7 @@ async def get_internal_proposals(self, page: int = 1, pageSize: int = 10, search async def add_metadata_and_agent_detail_in_internal_proposal( self, proposal: TriggerHistoryDto, index: int, results: list[Any] ): - url = f"{api_settings.DB_SYNC_API}/proposal?proposal={proposal.txHash}" + url = f"{api_settings.DB_SYNC_BASE_URL}/proposal?proposal={proposal.txHash}" proposal_data = await self._fetch_proposal_data(url) if not proposal_data: results[index] = "" @@ -83,9 +83,9 
@@ async def add_metadata_and_agent_detail_in_internal_proposal( results[index] = proposal_dict async def get_external_proposals(self, page: int, pageSize: int, sort: str, search: str | None = None): - search_url = f"{api_settings.DB_SYNC_API}/proposal?page={page}&size={pageSize}&sort={sort}" + search_url = f"{api_settings.DB_SYNC_BASE_URL}/proposal?page={page}&size={pageSize}&sort={sort}" if search: - search_url = f"{api_settings.DB_SYNC_API}/proposal?proposal={search}" + search_url = f"{api_settings.DB_SYNC_BASE_URL}/proposal?proposal={search}" async with aiohttp.ClientSession() as session: async with session.get(search_url) as response: @@ -126,7 +126,7 @@ async def _fetch_metadata(self, metadata_hash: str, url: str, proposal_dict: Any try: async with aiohttp.ClientSession() as session: async with session.get( - f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}" + f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}" ) as metadata_resp: metadata_resp_json = await metadata_resp.json() if "hash" in metadata_resp_json: diff --git a/api/backend/config/api_settings.py b/api/backend/config/api_settings.py index a493baa1b..4d5d36704 100644 --- a/api/backend/config/api_settings.py +++ b/api/backend/config/api_settings.py @@ -5,9 +5,9 @@ class APISettings(BaseSettings): APP_ENV: str = "production" SECURE: bool = None JWT_SECRET_KEY: str = "" - DB_SYNC_API: str = "https://dbsyncapi.agents.cardanoapi.io/api" + DB_SYNC_BASE_URL: str = "https://dbsyncapi.agents.cardanoapi.io/api" SAME_SITE = "None" - METADATA_API: str = "https://metadata.drep.id/api" + METADATA_BASE_URL: str = "https://metadata.drep.id/api" GOV_ACTION_API: str = "https://govtool.cardanoapi.io/api" KAFKA_TOPIC_PREFIX: str = "" KAFKA_PREFIX: str = "" diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 63929dc1a..b951f5b4e 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -25,8 +25,8 @@ services: - DOCS_URL=/api/docs - 
AGENT_MNEMONIC='group shadow belt culture garage sport demand target twist tribe milk dumb divide mango bench fantasy okay unfair error twice struggle naive squirrel romance' - KAFKA_ENABLED='True' - - METADATA_API='https://metadata.drep.id/api' - - DB_SYNC_API='https://dbsyncapi.agents.cardanoapi.io/api' + - METADATA_BASE_URL='https://metadata.drep.id/api' + - DB_SYNC_BASE_URL='http://dbsync:9000/api' depends_on: - postgres - kafka @@ -49,10 +49,10 @@ services: - CLIENT_ID=my-app - CARDANO_NODE_URL=172.31.0.4:3004 - KUBER_BASE_URL='https://sanchonet.kuber.cardanoapi.io' - - KUBER_API_KEY='l3HSdpCx86BOZFEvHspmYZ8KiVOnnvJDLKkC7JMVKUu9pZlV0Ld0qbn3pTQ3ER' - - MANAGER_WALLET_ADDRESS=addr_test1qrd3hs7rlxwwdzthe6hj026dmyt3y0heuulctscyydh2kguh4xfmpjqkd25vfq69hcvj27jqyk4hvnyxu7vma2c4kvps8eh2m3 - - MANAGER_WALLET_SIGNING_KEY=addr_sk14wrrctnv9cyr05vjnwrjcs7mujzuxf4zj5nm7fna5nx7fxrnrupqwhwycx - - FAUCET_API_KEY=ayEO6dlVF18oTslr9eIMUNgE35GqVfFz + - KUBER_API_KEY=l3HSdpCx86BOZFEvHspmYZ8KiVOnnvJDLKkC7JMVKUu9pZlV0Ld0qbn3pTQ3ER + - MANAGER_WALLET_ADDRESS= + - MANAGER_WALLET_SIGNING_KEY= + - FAUCET_API_KEY= - AGENT_MNEMONIC='group shadow belt culture garage sport demand target twist tribe milk dumb divide mango bench fantasy okay unfair error twice struggle naive squirrel romance' - METADATA_BASE_URL='https://metadata.drep.id' - DB_SYNC_BASE_URL='https://dbsyncapi.agents.cardanoapi.io/api' @@ -62,7 +62,19 @@ services: - SERVER_PORT=3002 - NETWORK_NAME='sanchonet' -# #Agent (build image) +# DbSync + dbsync: + build: + context: ./dbsync-api + dockerfile: Dockerfile + ports: + - "9000:9000" + environment: + - PORT=9000 + - CORS_ENABLE=true + - DATABASE_URL=postgres://dbsync:9PW%fj36ozwm^8u@mVwoh!uE&R@172.31.0.4:8433/dbsync_sanchonet + + # #Agent (build image) # agent: # build: # context: ./agent-node diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 5d0be260e..278566957 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -18,66 +18,66 @@ 
services: - cardano-autonomous-backend # #Pgadmin -# pgadmin: -# image: dpage/pgadmin4:8.2 -# container_name: pg-admin -# environment: -# PGADMIN_DEFAULT_EMAIL: admin@pgadmin.com -# PGADMIN_DEFAULT_PASSWORD: password -# ports: -# - "5050:80" -# networks: -# - cardano-autonomous-backend -# -##Zookeper -# zookeeper: -# image: confluentinc/cp-zookeeper:7.0.1 -# container_name: zookeeper_agent_autonomous -# ports: -# - "2181:2181" -# environment: -# ZOOKEEPER_CLIENT_PORT: 2181 -# ZOOKEEPER_TICK_TIME: 2000 -# ZOOKEEPER_SYNC_LIMIT: 2 -# networks: -# - cardano-autonomous-backend -# -# #Kafka -# kafka: -# image: confluentinc/cp-kafka:7.0.1 -# hostname: kafka -# container_name: kafka_agent_autonomous -# depends_on: -# - zookeeper -# ports: -# - "9092:9092" -# environment: -# KAFKA_BROKER_ID: 1 -# KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 -# KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT -# KAFKA_ADVERTISED_LISTENERS: DOCKER_NET://kafka:9093,HOST_NET://localhost:9092 -# KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER_NET -# KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 -# KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 -# KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 -# KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 -# KAFKA_JMX_PORT: 9999 -# KAFKA_CREATE_TOPICS: "trigger_config_updates:1:1" -# networks: -# - cardano-autonomous-backend -# -##Kafka UI -# kafka-ui: -# image: provectuslabs/kafka-ui -# container_name: kafka-ui-agent-autonomous -# ports: -# - "8080:8080" -# restart: always -# environment: -# - KAFKA_CLUSTERS_0_NAME=local -# - KAFKA_CLUSTERS_0_BOOTSTRAP_SERVERS=kafka:9093 -# networks: -# - cardano-autonomous-backend + pgadmin: + image: dpage/pgadmin4:8.2 + container_name: pg-admin + environment: + PGADMIN_DEFAULT_EMAIL: admin@pgadmin.com + PGADMIN_DEFAULT_PASSWORD: password + ports: + - "5050:80" + networks: + - cardano-autonomous-backend + +#Zookeper + zookeeper: + image: confluentinc/cp-zookeeper:7.0.1 + container_name: zookeeper_agent_autonomous + 
ports: + - "2181:2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + ZOOKEEPER_SYNC_LIMIT: 2 + networks: + - cardano-autonomous-backend + + #Kafka + kafka: + image: confluentinc/cp-kafka:7.0.1 + hostname: kafka + container_name: kafka_agent_autonomous + depends_on: + - zookeeper + ports: + - "9092:9092" + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: DOCKER_NET://kafka:9093,HOST_NET://localhost:9092 + KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER_NET + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_JMX_PORT: 9999 + KAFKA_CREATE_TOPICS: "trigger_config_updates:1:1" + networks: + - cardano-autonomous-backend + +#Kafka UI + kafka-ui: + image: provectuslabs/kafka-ui + container_name: kafka-ui-agent-autonomous + ports: + - "8080:8080" + restart: always + environment: + - KAFKA_CLUSTERS_0_NAME=local + - KAFKA_CLUSTERS_0_BOOTSTRAP_SERVERS=kafka:9093 + networks: + - cardano-autonomous-backend networks: cardano-autonomous-backend: diff --git a/frontend/src/app/(pages)/templates/create-template/components/event/RenderEventChildForm.tsx b/frontend/src/app/(pages)/templates/create-template/components/event/RenderEventChildForm.tsx index 0e87639f6..c82d96e09 100644 --- a/frontend/src/app/(pages)/templates/create-template/components/event/RenderEventChildForm.tsx +++ b/frontend/src/app/(pages)/templates/create-template/components/event/RenderEventChildForm.tsx @@ -32,14 +32,14 @@ const RenderEventChildForm = ({ }) => { const [localOperator, setLocalOperator] = useState(eventFilterParam.operator || 'eq'); const [value, setValue] = useState(eventFilterParam.value); - const [deleteBtnClicked,setDeleteBtnClicked] = useState(false) + const [deleteBtnClicked, setDeleteBtnClicked] = 
useState(false); useEffect(() => { if (deleteBtnClicked) { setLocalOperator(eventFilterParam.operator || 'eq'); setValue(eventFilterParam.value); } - setDeleteBtnClicked(false) + setDeleteBtnClicked(false); }, [deleteBtnClicked]); const paramId = Array.isArray(eventFilterParam.id) @@ -77,7 +77,7 @@ const RenderEventChildForm = ({ const handleOnDeleteParam = (paramId: string | string[]) => { onDeleteParameter && onDeleteParameter(paramId); - setDeleteBtnClicked(true) + setDeleteBtnClicked(true); }; const handleInputChange = (e: React.ChangeEvent) => { From e9bd1fef4549f0f5d4de26e057820f90578008b3 Mon Sep 17 00:00:00 2001 From: Sital999 Date: Fri, 10 Jan 2025 14:41:08 +0545 Subject: [PATCH 3/5] fix: Update docker-compose file and readme for it --- .env.example | 70 ++++++++ .gitignore | 1 + README.md | 49 ++++-- agent-node/src/index.ts | 12 +- agent-node/src/utils/validator.ts | 3 +- docker-compose.dev.yml | 160 +++++++++--------- docker-compose.local.yml | 25 --- frontend/Dockerfile | 8 +- .../src/components/Agent/RunnerTutorial.tsx | 10 +- frontend/src/configs/environments.ts | 5 +- frontend/src/utils/base64converter.ts | 2 +- 11 files changed, 214 insertions(+), 131 deletions(-) create mode 100644 .env.example diff --git a/.env.example b/.env.example new file mode 100644 index 000000000..9d253d075 --- /dev/null +++ b/.env.example @@ -0,0 +1,70 @@ +# Frontend +NEXT_PUBLIC_API_URL=http://localhost:8000/api +NEXT_PUBLIC_NETWORK_NAME=sanchonet +NEXT_PUBLIC_MANAGER_BASE_DOMAIN=agent_manager:3001 + +# API and Manager Common +KAFKA_PREFIX=testing +DATABASE_URL=postgresql://root:root@postgres:5432/cardano_autonomous_agent_testing_db +KAFKA_BROKERS=kafka:9093 +AGENT_MNEMONIC= + +# API +DOCS_URL=/api/docs +KAFKA_ENABLED=true +METADATA_BASE_URL=https://metadata.drep.id/api +DB_SYNC_BASE_URL=http://dbsync:9000/api + +# Agent Manager +CLIENT_ID=my-app +CARDANO_NODE_URL=172.31.0.4:3004 +KUBER_BASE_URL=https://sanchonet.kuber.cardanoapi.io +KUBER_API_KEY= 
+MANAGER_WALLET_ADDRESS= +MANAGER_WALLET_SIGNING_KEY= +FAUCET_API_KEY= +CARDANO_NETWORK_MAGIC=4 +BLOCKFROST_API_KEY= +ENABLE_BLOCKFROST_SUBMIT_API=True +NETWORK_NAME=sanchonet + +# Postgres +POSTGRES_DB=cardano_autonomous_agent_testing_db +POSTGRES_USER=root +POSTGRES_PASSWORD=root + +# DbSync +DBSYNC_PORT=9000 +DBSYNC_CORS_ENABLE=true +DBSYNC_DATABASE_URL= + +# KAFKA +KAFKA_BROKER_ID= 1 +KAFKA_ZOOKEEPER_CONNECT= zookeeper:2181 +KAFKA_LISTENER_SECURITY_PROTOCOL_MAP= DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT +KAFKA_ADVERTISED_LISTENERS= DOCKER_NET://kafka:9093,HOST_NET://localhost:9092 +KAFKA_INTER_BROKER_LISTENER_NAME= DOCKER_NET +KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR= 1 +KAFKA_TRANSACTION_STATE_LOG_MIN_ISR= 1 +KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR= 1 +KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS= 0 +KAFKA_JMX_PORT= 9999 +KAFKA_CREATE_TOPICS= "trigger_config_updates:1:1" + +#zookeeper +ZOOKEEPER_CLIENT_PORT = 2181 +ZOOKEEPER_TICK_TIME = 2000 +ZOOKEEPER_SYNC_LIMIT = 2 + +# Docker network name +DOCKER_NETWORK_NAME=cardano_autonomous_agent + +# Docker network name +DOCKER_NETWORK_NAME=cardano_autonomous_agent + +# Agent +AGENT_NODE_DOCKER_IMAGE_NAME=autonomous_agent + + + + diff --git a/.gitignore b/.gitignore index a60403863..bd55d9c4a 100644 --- a/.gitignore +++ b/.gitignore @@ -3,5 +3,6 @@ app.log .idea/ .vscode/ */**/.vscode/ +.env diff --git a/README.md b/README.md index 4336cb0c7..45accb5d2 100644 --- a/README.md +++ b/README.md @@ -20,11 +20,15 @@ and align with the decentralized governance objectives of Cardano's Voltaire era ### Using Docker -Before running whole service locally using docker you need to add few of the environment variables -on file `docker-compose.dev.yml` +Before running whole service locally using docker you need to create a `.env` and `.env.dbsync` file from `.env.example` and `.env.dbsync.example` respectively to add environment variables. +Below are some of the descriptions of the environment variables. 
-##### api -- AGENT_MNEMONIC +**Changes to be made in `.env` file** + +##### api and manager +- KAFKA_PREFIX + - prefix for kafka topic +- AGENT_MNEMONIC - Add seed phrase to generate wallet ##### agent_manager @@ -35,20 +39,27 @@ on file `docker-compose.dev.yml` - Add a wallet address having sufficient ADA so that it can be used to transfer ADA to agent when requested - FAUCET_API_KEY (OPTIONAL) - Add faucet api key to load ADA which will be used to transfer ADA to agents as per request. And it will only be used if the provided `MANAGER_WALLET_ADDRESS` doesnot have sufficient ADA. -- AGENT_MNEMONIC - - Add seed phrase to generate wallet that should be same as added in `api` - BLOCKFROST_API_KEY (Required if ENABLE_BLOCKFROST_SUBMIT_API is 'True' or enabled) - Visit [Blockfrost](https://blockfrost.io/) and sign up and generate api key -Note: environment variable `ENABLE_BLOCKFROST_SUBMIT_API` is preferred as if it is not enabled then `Kuber` will be used to submit the transaction which might take couple of minutes. +***Note***: environment variable `ENABLE_BLOCKFROST_SUBMIT_API` is preferred as if it is not enabled then `Kuber` will be used to submit the transaction which might take couple of minutes. 
##### dbsync -- DATABASE_URL +- DBSYNC_DATABASE_URL - Add database url of dbsync -Furthermore all env are setup to run in `Sanchonet` so if you want to run in `Preprod` or `Preview` +##### docker network name +- DOCKER_NETWORK_NAME + - Change name for docker network as default value is provided in `.env.example` + +##### agent +- AGENT_NODE_DOCKER_IMAGE_NAME + - Change name for docker network as default value is provided in `.env.example` + +***Note***: Furthermore all env are setup to run in `Sanchonet` so if you want to run in `Preprod` or `Preview` Network then following environment variables are to be updated: +**Changes to be made in `.env` file** ##### frontend - NEXT_PUBLIC_NETWORK_NAME - preview or preprod @@ -73,19 +84,33 @@ Network then following environment variables are to be updated: - NETWORK_NAME - preprod or preview -##### dbsync -- DATABASE_URL +##### dbsync +- DBSYNC_DATABASE_URL - Update the dbsync database url and database name accordingly + +Finally run the given command below: ```shell docker compose -f docker-compose.dev.yml up -d ``` -**Note**: You can also use already existing services in place of this +**Note** Make sure no application is running on port `3000`, `8000` + +**Note**: After running the above command line, you can run the agent by following steps: +###### For running agent: +- Visit frontend at `http://localhost:3000` and connect your wallet. +- Then click the `My Agent` tab at bottom left. you will be navigated to `Agents Page` +- In `Overview Tab` click `Run Agent` button at the top right of `Agents Overview Section` +- Now copy the docker command and run it in terminal. And Finally your agent is ready to run. +### Setup Locally + The setup guide for each services are in the respective directories: +For running all services locally some of the dependent services like `Kafka`, `Postgresql` can be run via Docker using following command. + + 1. [Backend](api/README.md) 2. [Agent Manager](agent-manager/README.md) 3. 
[Agent](agent-node/README.md) diff --git a/agent-node/src/index.ts b/agent-node/src/index.ts index 64f843f20..a9d2c6817 100644 --- a/agent-node/src/index.ts +++ b/agent-node/src/index.ts @@ -32,11 +32,15 @@ if (token) { } if (!wsUrl) { const network = token.split('_')[0] - - if (network && process.env.MANAGER_BASE_DOMAIN) { + const managerBaseDomain = process.env.MANAGER_BASE_DOMAIN + if (network && managerBaseDomain) { // This is set in docker file - wsUrl = `wss://${network.toLowerCase()}.${process.env.MANAGER_BASE_DOMAIN}` - } else { + wsUrl = `wss://${network.toLowerCase()}.${managerBaseDomain}` + } + else if (managerBaseDomain){ + wsUrl = `ws://${managerBaseDomain}` + } + else { wsUrl = 'ws://localhost:3001' } } diff --git a/agent-node/src/utils/validator.ts b/agent-node/src/utils/validator.ts index fbee26e78..170bebd5f 100644 --- a/agent-node/src/utils/validator.ts +++ b/agent-node/src/utils/validator.ts @@ -4,13 +4,12 @@ import { logicalFunctions } from './operatorSupport' const NetworkName = ['preview', 'preprod', 'sanchonet'] export function validateToken(token: string) { - if (token.split('_').length !== 2) { + if (token.split('_').length < 1) { return 'Not a valid token. Missing secret key' } if (token.split('_')[1].includes('undefined')) { return 'Not a valid token. 
Missing secret key' } - if (!NetworkName.includes(token.split('_')[0])) return 'Not a valid network name' return '' } diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index b951f5b4e..83b32ac9c 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -7,11 +7,16 @@ services: build: context: ./frontend dockerfile: Dockerfile + args: + - NEXT_PUBLIC_MANAGER_BASE_DOMAIN=${NEXT_PUBLIC_MANAGER_BASE_DOMAIN} + - NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} + - NEXT_PUBLIC_NETWORK_NAME=${NEXT_PUBLIC_NETWORK_NAME} + - NEXT_PUBLIC_DOCKER_NETWORK_NAME=${DOCKER_NETWORK_NAME} + - NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME=${AGENT_NODE_DOCKER_IMAGE_NAME} ports: - "3000:3000" - environment: - - NEXT_PUBLIC_API_ENDPOINT_HOST=http://api:8000/api - - NEXT_PUBLIC_NETWORK_NAME=sanchonet + networks: + - autonomous-agent # FastAPI application (build the image) @@ -20,17 +25,20 @@ services: ports: - "8000:8000" environment: - - DATABASE_URL=postgresql://root:root@postgres:5432/cardano_autonomous_agent_testing_db - - KAFKA_BROKERS=kafka:9093 - - DOCS_URL=/api/docs - - AGENT_MNEMONIC='group shadow belt culture garage sport demand target twist tribe milk dumb divide mango bench fantasy okay unfair error twice struggle naive squirrel romance' - - KAFKA_ENABLED='True' - - METADATA_BASE_URL='https://metadata.drep.id/api' - - DB_SYNC_BASE_URL='http://dbsync:9000/api' + - DOCS_URL=${DOCS_URL} + - KAFKA_ENABLED=${KAFKA_ENABLED} + - METADATA_BASE_URL=${METADATA_BASE_URL} + - DB_SYNC_BASE_URL=${DB_SYNC_BASE_URL} + - KAFKA_PREFIX=${KAFKA_PREFIX} + - DATABASE_URL=${DATABASE_URL} + - KAFKA_BROKERS=${KAFKA_BROKERS} + - AGENT_MNEMONIC=${AGENT_MNEMONIC} + restart: on-failure:3 depends_on: - postgres - kafka - - pgadmin + networks: + - autonomous-agent # Agent Manager application (build image) agent_manager: @@ -39,28 +47,29 @@ services: dockerfile: Dockerfile ports: - "3001:3001" + restart: on-failure:3 depends_on: - postgres - kafka - api environment: - - 
DATABASE_URL=postgresql://root:root@postgres:5432/cardano_autonomous_agent_testing_db - - KAFKA_BROKERS=kafka:9093 - - CLIENT_ID=my-app - - CARDANO_NODE_URL=172.31.0.4:3004 - - KUBER_BASE_URL='https://sanchonet.kuber.cardanoapi.io' - - KUBER_API_KEY=l3HSdpCx86BOZFEvHspmYZ8KiVOnnvJDLKkC7JMVKUu9pZlV0Ld0qbn3pTQ3ER - - MANAGER_WALLET_ADDRESS= - - MANAGER_WALLET_SIGNING_KEY= - - FAUCET_API_KEY= - - AGENT_MNEMONIC='group shadow belt culture garage sport demand target twist tribe milk dumb divide mango bench fantasy okay unfair error twice struggle naive squirrel romance' - - METADATA_BASE_URL='https://metadata.drep.id' - - DB_SYNC_BASE_URL='https://dbsyncapi.agents.cardanoapi.io/api' - - CARDANO_NETWORK_MAGIC=4 - - BLOCKFROST_API_KEY='sanchonetWC2xdJbuASgECPHevRkMkh6QQqJf2nve' - - ENABLE_BLOCKFROST_SUBMIT_API='True' - - SERVER_PORT=3002 - - NETWORK_NAME='sanchonet' + - CLIENT_ID=${CLIENT_ID} + - CARDANO_NODE_URL=${CARDANO_NODE_URL} + - KUBER_BASE_URL=${KUBER_BASE_URL} + - KUBER_API_KEY=${KUBER_API_KEY} + - MANAGER_WALLET_ADDRESS=${MANAGER_WALLET_ADDRESS} + - MANAGER_WALLET_SIGNING_KEY=${MANAGER_WALLET_SIGNING_KEY} + - FAUCET_API_KEY=${FAUCET_API_KEY} + - CARDANO_NETWORK_MAGIC=${CARDANO_NETWORK_MAGIC} + - BLOCKFROST_API_KEY=${BLOCKFROST_API_KEY} + - ENABLE_BLOCKFROST_SUBMIT_API=${ENABLE_BLOCKFROST_SUBMIT_API} + - NETWORK_NAME=${NETWORK_NAME} + - KAFKA_PREFIX=${KAFKA_PREFIX} + - DATABASE_URL=${DATABASE_URL} + - KAFKA_BROKERS=${KAFKA_BROKERS} + - AGENT_MNEMONIC=${AGENT_MNEMONIC} + networks: + - autonomous-agent # DbSync dbsync: @@ -70,85 +79,76 @@ services: ports: - "9000:9000" environment: - - PORT=9000 - - CORS_ENABLE=true - - DATABASE_URL=postgres://dbsync:9PW%fj36ozwm^8u@mVwoh!uE&R@172.31.0.4:8433/dbsync_sanchonet + - PORT=${DBSYNC_PORT} + - CORS_ENABLE=${DBSYNC_CORS_ENABLE} + - DATABASE_URL=${DBSYNC_DATABASE_URL} - # #Agent (build image) -# agent: -# build: -# context: ./agent-node -# dockerfile: Dockerfile -# ports: -# - "3002:3002" -# depends_on: -# - agent_manager 
-# environment: -# - WS_URL= # Use service name as hostname within Docker network -# - AGENT_ID= # Provide the agent ID as needed + networks: + - autonomous-agent + + agent-node: + build: + context: ./agent-node + dockerfile: Dockerfile + image: ${AGENT_NODE_DOCKER_IMAGE_NAME} #Database postgres: image: postgres:16.2 environment: - POSTGRES_DB: cardano_autonomous_agent_testing_db - POSTGRES_USER: root - POSTGRES_PASSWORD: root + - POSTGRES_DB=${POSTGRES_DB} + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} ports: - "5432:5432" volumes: - pg-data:/var/lib/postgresql/data + networks: + - autonomous-agent - #Pgadmin - pgadmin: - image: dpage/pgadmin4:8.2 - environment: - PGADMIN_DEFAULT_EMAIL: admin@pgadmin.com - PGADMIN_DEFAULT_PASSWORD: password - ports: - - "5050:80" - -#Zookeper + #Zookeper zookeeper: image: confluentinc/cp-zookeeper:7.0.1 + container_name: zookeeper_agent_autonomous ports: - "2181:2181" environment: - ZOOKEEPER_CLIENT_PORT: 2181 - ZOOKEEPER_TICK_TIME: 2000 - ZOOKEEPER_SYNC_LIMIT: 2 + - ZOOKEEPER_CLIENT_PORT=${ZOOKEEPER_CLIENT_PORT} + - ZOOKEEPER_TICK_TIME=${ZOOKEEPER_TICK_TIME} + - ZOOKEEPER_SYNC_LIMIT=${ZOOKEEPER_SYNC_LIMIT} + networks: + - autonomous-agent - #Kafka + #Kafka kafka: image: confluentinc/cp-kafka:7.0.1 hostname: kafka + container_name: kafka_agent_autonomous depends_on: - zookeeper ports: - "9092:9092" + - "9093:9093" environment: - KAFKA_BROKER_ID: 1 - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT - KAFKA_ADVERTISED_LISTENERS: DOCKER_NET://kafka:9093,HOST_NET://localhost:9092 - KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER_NET - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 - KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 - KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 - KAFKA_JMX_PORT: 9999 - KAFKA_CREATE_TOPICS: "trigger_config_updates:1:1" + - KAFKA_BROKER_ID=${KAFKA_BROKER_ID} + - 
KAFKA_ZOOKEEPER_CONNECT=${KAFKA_ZOOKEEPER_CONNECT} + - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=${KAFKA_LISTENER_SECURITY_PROTOCOL_MAP} + - KAFKA_ADVERTISED_LISTENERS=${KAFKA_ADVERTISED_LISTENERS} + - KAFKA_INTER_BROKER_LISTENER_NAME=${KAFKA_INTER_BROKER_LISTENER_NAME} + - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=${KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR} + - KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=${KAFKA_TRANSACTION_STATE_LOG_MIN_ISR} + - KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=${KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR} + - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=${KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS} + - KAFKA_JMX_PORT=${KAFKA_JMX_PORT} + - KAFKA_CREATE_TOPICS=${KAFKA_CREATE_TOPICS} + networks: + - autonomous-agent -#Kafka UI - kafka-ui: - image: provectuslabs/kafka-ui - ports: - - "8080:8080" - restart: always - environment: - - KAFKA_CLUSTERS_0_NAME=local - - KAFKA_CLUSTERS_0_BOOTSTRAP_SERVERS=kafka:9093 +networks: + autonomous-agent: + name: ${DOCKER_NETWORK_NAME} + driver: bridge volumes: pg-data: \ No newline at end of file diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 278566957..46475b53f 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -17,18 +17,6 @@ services: networks: - cardano-autonomous-backend -# #Pgadmin - pgadmin: - image: dpage/pgadmin4:8.2 - container_name: pg-admin - environment: - PGADMIN_DEFAULT_EMAIL: admin@pgadmin.com - PGADMIN_DEFAULT_PASSWORD: password - ports: - - "5050:80" - networks: - - cardano-autonomous-backend - #Zookeper zookeeper: image: confluentinc/cp-zookeeper:7.0.1 @@ -66,19 +54,6 @@ services: networks: - cardano-autonomous-backend -#Kafka UI - kafka-ui: - image: provectuslabs/kafka-ui - container_name: kafka-ui-agent-autonomous - ports: - - "8080:8080" - restart: always - environment: - - KAFKA_CLUSTERS_0_NAME=local - - KAFKA_CLUSTERS_0_BOOTSTRAP_SERVERS=kafka:9093 - networks: - - cardano-autonomous-backend - networks: cardano-autonomous-backend: name: backend diff --git 
a/frontend/Dockerfile b/frontend/Dockerfile index e67882c3e..78dcdc775 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -17,7 +17,13 @@ COPY ./src ./src COPY ./public ./public COPY next.config.mjs .eslintignore .eslintrc.json postcss.config.js ./next-i18next.config.js ./entrypoint.sh prettier.config.js tsconfig.json tailwind.config.ts ./ -RUN NEXT_PUBLIC_UMAMI_ENABLED=true NEXT_PUBLIC_NODE_ENV=production NEXT_PUBLIC_APM_ENABLED=true yarn build && rm -rf ./.next/cache +ARG NEXT_PUBLIC_MANAGER_BASE_DOMAIN +ARG NEXT_PUBLIC_API_URL +ARG NEXT_PUBLIC_NETWORK_NAME +ARG NEXT_PUBLIC_DOCKER_NETWORK_NAME +ARG NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME + +RUN NEXT_PUBLIC_UMAMI_ENABLED=true NEXT_PUBLIC_NODE_ENV=production NEXT_PUBLIC_APM_ENABLED=true NEXT_PUBLIC_API_URL=$NEXT_PUBLIC_API_URL NEXT_PUBLIC_MANAGER_BASE_DOMAIN=$NEXT_PUBLIC_MANAGER_BASE_DOMAIN NEXT_PUBLIC_NETWORK_NAME=$NEXT_PUBLIC_NETWORK_NAME NEXT_PUBLIC_DOCKER_NETWORK_NAME=$NEXT_PUBLIC_DOCKER_NETWORK_NAME NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME=$NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME yarn build && rm -rf ./.next/cache FROM node:18-alpine diff --git a/frontend/src/components/Agent/RunnerTutorial.tsx b/frontend/src/components/Agent/RunnerTutorial.tsx index dd39d8965..ad921507d 100644 --- a/frontend/src/components/Agent/RunnerTutorial.tsx +++ b/frontend/src/components/Agent/RunnerTutorial.tsx @@ -1,13 +1,13 @@ import Link from 'next/link'; -import { Copy } from 'lucide-react'; +import {Copy} from 'lucide-react'; -import { SuccessToast } from '@app/components/molecules/CustomToasts'; +import {SuccessToast} from '@app/components/molecules/CustomToasts'; import environments from '@app/configs/environments'; -import { convertToBase64 } from '@app/utils/base64converter'; +import {convertToBase64} from '@app/utils/base64converter'; -const AgentRunnerTutorial = ({ agentSecretKey, showToken }: { agentSecretKey: string; showToken?: boolean }) => { - const dockerCommand = `docker run -d --pull always -e 
TOKEN=${convertToBase64(agentSecretKey)} cardanoapi/autonomous-agents:${environments.NEXT_PUBLIC_IMAGE_TAG}`; +const AgentRunnerTutorial = ({agentSecretKey, showToken}: { agentSecretKey: string; showToken?: boolean }) => { + const dockerCommand = environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN ? `docker run -d --network=${environments.NEXT_PUBLIC_DOCKER_NETWORK_NAME || 'autonomous_agent'} -e TOKEN=${convertToBase64(agentSecretKey)} -e MANAGER_BASE_DOMAIN=${environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN} ${environments.NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME||'cardanoapi/autonomous-agents:Dev'}` : `docker run -d --pull always -e TOKEN=${convertToBase64(agentSecretKey)} cardanoapi/autonomous-agents:${environments.NEXT_PUBLIC_IMAGE_TAG}`; return (
diff --git a/frontend/src/configs/environments.ts b/frontend/src/configs/environments.ts index 2e3195f0a..671710962 100644 --- a/frontend/src/configs/environments.ts +++ b/frontend/src/configs/environments.ts @@ -68,7 +68,10 @@ export const environments = { APM_ENABLED: process.env.ELASTIC_APM_SERVER_URL && process.env.ELASTIC_APM_SERVICE_NAME, NEXT_PUBLIC_IMAGE_TAG: process.env.NEXT_PUBLIC_IMAGE_TAG ?? 'dev', GOVTOOL_BASE_URL: 'https://govtool.cardanoapi.io', - NEXT_PUBLIC_ENABLE_AGENT_INSTANCE: process.env.NEXT_PUBLIC_ENABLE_AGENT_INSTANCE === 'true' || false + NEXT_PUBLIC_ENABLE_AGENT_INSTANCE: process.env.NEXT_PUBLIC_ENABLE_AGENT_INSTANCE === 'true' || false, + NEXT_PUBLIC_MANAGER_BASE_DOMAIN: process.env.NEXT_PUBLIC_MANAGER_BASE_DOMAIN || '', + NEXT_PUBLIC_DOCKER_NETWORK_NAME: process.env.NEXT_PUBLIC_DOCKER_NETWORK_NAME || '', + NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME: process.env.NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME || '' }; if (typeof window !== 'undefined') { diff --git a/frontend/src/utils/base64converter.ts b/frontend/src/utils/base64converter.ts index 1e25b4d88..e5d22c205 100644 --- a/frontend/src/utils/base64converter.ts +++ b/frontend/src/utils/base64converter.ts @@ -1,7 +1,7 @@ import environments from '@app/configs/environments'; export function convertToBase64(agentSecretKey: string) { - const newSecretKey = environments.network + '_' + agentSecretKey; + const newSecretKey = environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN ? 
'_' + agentSecretKey : environments.network + '_' + agentSecretKey; const buffer = new Buffer(newSecretKey); return buffer.toString('base64'); } From c3f9e901e734a1b0d0ee25f2d9ac19fe693a02cd Mon Sep 17 00:00:00 2001 From: Sital999 Date: Mon, 13 Jan 2025 10:53:00 +0545 Subject: [PATCH 4/5] enhancement: Update Readme for api and frontend --- README.md | 173 +++++++++++++++++++++++---------------------- api/.env.example | 8 +-- api/README.md | 68 ++++++++---------- frontend/README.md | 30 +++----- 4 files changed, 126 insertions(+), 153 deletions(-) diff --git a/README.md b/README.md index 45accb5d2..7dcaad16c 100644 --- a/README.md +++ b/README.md @@ -1,125 +1,130 @@ -### Welcome to Autonomous Agent Testing -Autonomous Agent Testing primarily focuses on evaluating the features introduced in the Cardano Improvement Proposal (CIP) 1694. -This includes testing the creation and voting mechanisms for proposals to ensure the governance model operates seamlessly. -Additionally, it verifies functionalities like registering and deregistering as a Delegated Representative (DRep), -managing stake registrations and deregistrations, and performing ADA transfers. It also provides the feature to trigger these function either -Manually or by setting a CRON schedule or by event filtering The testing process ensures these operations are secure, efficient, -and align with the decentralized governance objectives of Cardano's Voltaire era. +# Welcome to Autonomous Agent Testing -## Running the deployed service -[Autonomous Agent Testing](https://agents.cardanoapi.io/) +Autonomous Agent Testing focuses on evaluating features introduced in the Cardano Improvement Proposal (CIP) 1694. This includes testing the creation and voting mechanisms for proposals to ensure the governance model operates seamlessly. Additionally, it verifies functionalities such as: -## Running the stack locally +- Registering and deregistering as a Delegated Representative (DRep). 
+- Managing stake registrations and deregistrations. +- Performing ADA transfers. +- Triggering operations manually, via a CRON schedule, or through event filtering. + +The testing process ensures these operations are secure, efficient, and aligned with the decentralized governance objectives of Cardano's Voltaire era. + +--- + +## Accessing the Deployed Service + +You can access the deployed service here: [Autonomous Agent Testing](https://agents.cardanoapi.io/) + +--- + +## Running the Stack Locally ### Directory Structure -1. `api`: the backend service -2. `manager`: Middleman between agents and backend. Also handles different services for agent -3. `agent-node`: agent for handling various functions -4. `frontend`: UI for autonomous agent testing -5. `dbsync-api`: handling services related to dbsync database + +1. **`api`**: Backend service. +2. **`manager`**: Middleware between agents and the backend; handles various agent-related services. +3. **`agent-node`**: Agent responsible for executing various functions. +4. **`frontend`**: User interface for autonomous agent testing. +5. **`dbsync-api`**: Service for interacting with the dbsync database. ### Using Docker -Before running whole service locally using docker you need to create a `.env` and `.env.dbsync` file from `.env.example` and `.env.dbsync.example` respectively to add environment variables. -Below are some of the descriptions of the environment variables. +Before running the entire service locally using Docker, create `.env` files from `.env.example` and populate them with the necessary environment variables. Below are descriptions of key variables: +> **Note:** Some variables in `.env.example` are prepopulated. Keep them as it is or change them carefully. + +#### Changes to be made in `.env` file + +##### API and Manager + +- **`KAFKA_PREFIX`**: Prefix for Kafka topics. +- **`AGENT_MNEMONIC`**: Seed phrase to generate a wallet. 
-**Changes to be made in `.env` file** +##### Agent Manager -##### api and manager -- KAFKA_PREFIX - - prefix for kafka topic -- AGENT_MNEMONIC - - Add seed phrase to generate wallet +- **`KUBER_API_KEY`**: Generate an API key from [KuberIde](https://kuberide.com/kuber/settings/api-keys). +- **`MANAGER_WALLET_ADDRESS`** (Optional): Wallet address with sufficient ADA for transfers. +- **`MANAGER_WALLET_SIGNING_KEY`** (Optional): Signing key for the manager wallet. +- **`FAUCET_API_KEY`** (Optional): API key to load ADA for agent transfers if the manager wallet lacks sufficient funds. +- **`BLOCKFROST_API_KEY`** (Required if `ENABLE_BLOCKFROST_SUBMIT_API` is enabled): Obtain from [Blockfrost](https://blockfrost.io/). -##### agent_manager -- KUBER_API_KEY - - Visit [KuberIde](https://kuberide.com/kuber/settings/api-keys) and generate api-key -- MANAGER_WALLET_ADDRESS (OPTIONAL) -- MANAGER_WALLET_SIGNING_KEY (OPTIONAL) - - Add a wallet address having sufficient ADA so that it can be used to transfer ADA to agent when requested -- FAUCET_API_KEY (OPTIONAL) - - Add faucet api key to load ADA which will be used to transfer ADA to agents as per request. And it will only be used if the provided `MANAGER_WALLET_ADDRESS` doesnot have sufficient ADA. -- BLOCKFROST_API_KEY (Required if ENABLE_BLOCKFROST_SUBMIT_API is 'True' or enabled) - - Visit [Blockfrost](https://blockfrost.io/) and sign up and generate api key +> **Note:** If `ENABLE_BLOCKFROST_SUBMIT_API` is not enabled, transactions will be submitted using `Kuber`, which may take a few minutes. -***Note***: environment variable `ENABLE_BLOCKFROST_SUBMIT_API` is preferred as if it is not enabled then `Kuber` will be used to submit the transaction which might take couple of minutes. +##### DBSync -##### dbsync -- DBSYNC_DATABASE_URL - - Add database url of dbsync +- **`DBSYNC_DATABASE_URL`**: URL for the dbsync database. 
-##### docker network name -- DOCKER_NETWORK_NAME - - Change name for docker network as default value is provided in `.env.example` +##### Docker Network Name -##### agent -- AGENT_NODE_DOCKER_IMAGE_NAME - - Change name for docker network as default value is provided in `.env.example` +- **`DOCKER_NETWORK_NAME`**: Customize the Docker network name (default value provided in `.env.example`). -***Note***: Furthermore all env are setup to run in `Sanchonet` so if you want to run in `Preprod` or `Preview` -Network then following environment variables are to be updated: +##### Agent -**Changes to be made in `.env` file** -##### frontend -- NEXT_PUBLIC_NETWORK_NAME - - preview or preprod +- **`AGENT_NODE_DOCKER_IMAGE_NAME`**: Customize the Docker image name for the agent node. -##### api and manager -- DB_SYNC_BASE_URL - - https://preprod-dbync.agents.cardanoapi.io/api for `preprod` - - https://preview-dbync.agents.cardanoapi.io/api for `preview` +#### Running in `Preprod` or `Preview` Networks -##### manager only -- KUBER_BASE_URL - - https://preview.kuber.cardanoapi.io for `preview` - - https://preprod.kuber.cardanoapi.io for `preprod` +To run in `Preprod` or `Preview` networks, update the following environment variables: -- CARDANO_NETWORK_MAGIC - - 3 for `preview` - - 2 for `preprod` +##### Frontend -- BLOCKFROST_API_KEY - - Visit [Blockfrost](https://blockfrost.io/) and sign up and generate api key based on desired network type +- **`NEXT_PUBLIC_NETWORK_NAME`**: Set to `preview` or `preprod`. 
-- NETWORK_NAME - - preprod or preview +##### API and Manager -##### dbsync -- DBSYNC_DATABASE_URL - - Update the dbsync database url and database name accordingly +- **`DB_SYNC_BASE_URL`**: + - `https://preprod-dbync.agents.cardanoapi.io/api` for `preprod` + - `https://preview-dbync.agents.cardanoapi.io/api` for `preview` +##### Manager Only -Finally run the given command below: -```shell +- **`KUBER_BASE_URL`**: + - `https://preview.kuber.cardanoapi.io` for `preview` + - `https://preprod.kuber.cardanoapi.io` for `preprod` +- **`CARDANO_NETWORK_MAGIC`**: + - `3` for `preview` + - `2` for `preprod` +- **`BLOCKFROST_API_KEY`**: Obtain from [Blockfrost](https://blockfrost.io/) for the desired network. +- **`NETWORK_NAME`**: Set to `preprod` or `preview`. + +##### DBSync + +- **`DBSYNC_DATABASE_URL`**: Update the URL and database name accordingly. + +#### Starting the Service + +Run the following command: + +```bash docker compose -f docker-compose.dev.yml up -d ``` -**Note** Make sure no application is running on port `3000`, `8000` +> **Note:** Ensure no applications are running on ports `3000` and `8000`. -**Note**: After running the above command line, you can run the agent by following steps: -###### For running agent: -- Visit frontend at `http://localhost:3000` and connect your wallet. -- Then click the `My Agent` tab at bottom left. you will be navigated to `Agents Page` -- In `Overview Tab` click `Run Agent` button at the top right of `Agents Overview Section` -- Now copy the docker command and run it in terminal. And Finally your agent is ready to run. +#### Running the Agent +1. Visit the frontend at `http://localhost:3000` and connect your wallet. +2. Navigate to the `My Agent` tab in the bottom left to access the `Agents Page`. +3. In the `Overview Tab`, click the `Run Agent` button in the top-right corner of the `Agents Overview Section`. +4. Copy the Docker command and run it in the terminal. Your agent is now ready to operate. 
-### Setup Locally +--- -The setup guide for each services are in the respective directories: - -For running all services locally some of the dependent services like `Kafka`, `Postgresql` can be run via Docker using following command. +### Local Setup +Each service has its own setup guide within its respective directory. For running all services locally, dependencies like `Kafka` and `PostgreSQL` can be run via Docker using the following command: 1. [Backend](api/README.md) 2. [Agent Manager](agent-manager/README.md) 3. [Agent](agent-node/README.md) 4. [Frontend](frontend/README.md) +--- + +## Important -# IMPORTANT +Before committing any changes to the repository, set up the pre-commit hook by running the following command: -Please setup the pre-commit hook before adding any commit for git by running the following command: -```shell +```bash ./install-pre-commit-hook.sh -``` \ No newline at end of file +``` + diff --git a/api/.env.example b/api/.env.example index a3430102d..eb8466153 100644 --- a/api/.env.example +++ b/api/.env.example @@ -1,14 +1,8 @@ -# Environment -# Allowed Values : development , production -APP_ENV=production - DATABASE_URL= -AGENT_MNEMONIC="" +AGENT_MNEMONIC= KAFKA_BROKERS= KAFKA_ENABLED=true DOCS_URL=/api/docs -OPENAPI_URL=/api/openapi.json -DB_SYNC_BASE_URL= KUBER_URL=localhost KAFKA_PREFIX= diff --git a/api/README.md b/api/README.md index 331ac6a44..943e4ef6f 100644 --- a/api/README.md +++ b/api/README.md @@ -8,34 +8,6 @@ Python version : 3.12.2 Poetry version : 1.8.2 -## Docker - -## Setup Guide - -Clone the project - -```bash - git clone https://github.com/sireto/cardano-autonomous-agent -``` - -Change directory - -```bash - cd cardano-autonomous-agent -``` - -Run Docker-Compose . This will setup up the **postgres Database**, **pgadmin4** , **kafka** and **backend** via Docker. 
- -```bash - docker compose -f "docker-compose.deployment.yml" up --build -d -``` - -After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api services - -## Locally - -## Setup Guide - #### Prerequisites - Python version: `3.12` or higher @@ -48,9 +20,20 @@ After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api serv > > - Postgres (Required) > -> - Kafka with Zookeeper (Optional) -> -> - Redis (Optional) +> - Kafka with Zookeeper (Required) + +#### Setup environment variables +Make new file `.env` using `.env.example` and update the environments before running the below steps: + +- **`KAFKA_PREFIX`**: Prefix for Kafka topics. +- **`AGENT_MNEMONIC`**: Seed phrase to generate a wallet. +- **`DOCS_URL`**: Path for swagger docs +- **`KAFKA_ENABLED`**: To enable kafka (Must be enabled by putting value `true` to run the testing agents) +- **`METADATA_BASE_URL`**: Metadata url to fetch metadata of the drep and proposals of different network +- **`DB_SYNC_BASE_URL`**: DbSync url +- **`KAFKA_PREFIX`**: Kafka prefix topic +- **`DATABASE_URL`**: Postgres database url +- **`KAFKA_BROKERS`**: Kafka broker url
@@ -65,6 +48,16 @@ After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api serv ```shell poetry shell ``` +3. Check if your virtual env is created using python of version `3.12` or higher +> **Note:** Your terminal should have something like this `(backend-py3.12) ` + - If it is not created using python of version `3.12` or higher then create virtual environment again using command + ```shell + poetry env use 3.12 + ``` + - And finally again use command + ```shell + poetry shell + ``` 3. Install Dependencies @@ -74,6 +67,7 @@ After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api serv 4. Update the environment variables copying it form `.env.example` to `.env` + 5. Run this command for generating the database client and creating the required table mentioned in schema ```shell @@ -81,13 +75,7 @@ After successfully run ,Go to http://0.0.0.0:8000/ , to see the list of api serv prisma migrate dev ``` -## Running the Server - -Activate Poetry venv inside autonomous-agents-api folder by running the following command. - -```bash - poetry shell -``` +> **Note**: You should always activate virtual environment by using command `poetry shell` before running below command Start the server with env variables. @@ -95,6 +83,6 @@ Start the server with env variables. uvicorn backend.app:get_application --port 8000 --reload --env-file .env ``` -Go to http://localhost:8000 +Go to http://localhost:8000/api/docs -You would see the list of API available +You would see the list of available API diff --git a/frontend/README.md b/frontend/README.md index 5ce4a7c66..f62ea1c87 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -1,8 +1,12 @@ -This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). 
- ## Getting Started -First, run the development server: +First, install the required dependencies for the project using the following command: + +```bash +yarn install +``` + +Once the installation is complete, run one of the commands below to start the development server: ```bash npm run dev @@ -14,23 +18,5 @@ pnpm dev bun dev ``` -Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. - -You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. - -This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font. - -## Learn More - -To learn more about Next.js, take a look at the following resources: - -- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. -- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. - -You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! - -## Deploy on Vercel - -The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. +Open [http://localhost:3000](http://localhost:3000) in your browser to view the application. -Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. 
From c2c10eff81c92214edec9583650a1dbf9b33a074 Mon Sep 17 00:00:00 2001
From: Sital Nagarkoti
Date: Mon, 13 Jan 2025 12:44:52 +0545
Subject: [PATCH 5/5] enhancement: Update readme and .env.example for all
 services

---
 README.md                                     |  14 ++-
 agent-manager/.env.example                    |  41 ++++-----
 agent-manager/README.md                       |  88 +++++++++++++------
 agent-manager/src/controller/health.ts        |   6 +-
 agent-node/.env.example                       |   4 +-
 agent-node/README.md                          |  49 +++--------
 .../src/functions/proposalNewConstitution.ts  |  12 +--
 .../src/functions/treasuryWithdrawal.ts       |  12 +--
 agent-node/src/index.ts                       |   6 +-
 api/.env.example                              |  17 ++--
 api/README.md                                 |  38 ++++----
 dbsync-api/.env.example                       |   5 +-
 dbsync-api/README.md                          |  29 ++++++
 frontend/.env.example                         |   1 -
 frontend/README.md                            |  10 ++-
 .../src/components/Agent/RunnerTutorial.tsx   |  12 +--
 frontend/src/utils/base64converter.ts         |   4 +-
 17 files changed, 193 insertions(+), 155 deletions(-)
 create mode 100644 dbsync-api/README.md

diff --git a/README.md b/README.md
index 7dcaad16c..ca5d3cb1c 100644
--- a/README.md
+++ b/README.md
@@ -26,6 +26,7 @@ You can access the deployed service here: [Autonomous Agent Testing](https://age
 3. **`agent-node`**: Agent responsible for executing various functions.
 4. **`frontend`**: User interface for autonomous agent testing.
 5. **`dbsync-api`**: Service for interacting with the dbsync database.
+---
 
 ### Using Docker
 
@@ -51,7 +52,7 @@ Before running the entire service locally using Docker, create `.env` files from
 
 ##### DBSync
 
-- **`DBSYNC_DATABASE_URL`**: URL for the dbsync database.
+- **`DBSYNC_DATABASE_URL`**: URL for the `dbsync-api` service. Defaults to `http://localhost:9000` when the `dbsync-api` service is started.
 
 ##### Docker Network Name
 
@@ -89,6 +90,7 @@ To run in `Preprod` or `Preview` networks, update the following environment vari
 ##### DBSync
 
 - **`DBSYNC_DATABASE_URL`**: Update the URL and database name accordingly.
+--- #### Starting the Service @@ -100,7 +102,7 @@ docker compose -f docker-compose.dev.yml up -d > **Note:** Ensure no applications are running on ports `3000` and `8000`. -#### Running the Agent +#### Finally Running the Agent 1. Visit the frontend at `http://localhost:3000` and connect your wallet. 2. Navigate to the `My Agent` tab in the bottom left to access the `Agents Page`. @@ -111,12 +113,18 @@ docker compose -f docker-compose.dev.yml up -d ### Local Setup -Each service has its own setup guide within its respective directory. For running all services locally, dependencies like `Kafka` and `PostgreSQL` can be run via Docker using the following command: +Each service has its own setup guide within its respective directory. 1. [Backend](api/README.md) 2. [Agent Manager](agent-manager/README.md) 3. [Agent](agent-node/README.md) 4. [Frontend](frontend/README.md) +5. [DbSync-Api](dbsync-api/README.md) + +**`Note`**: For running all services locally, dependencies like `Kafka` and `PostgreSQL` can be run via Docker using the following command: +```bash +docker compose -f docker-compose.dev.yml up -d +``` --- diff --git a/agent-manager/.env.example b/agent-manager/.env.example index f5f626be0..d1a4cbb0b 100644 --- a/agent-manager/.env.example +++ b/agent-manager/.env.example @@ -1,34 +1,31 @@ -KAFKA_BROKERS=127.0.0.1:9092 -KAFKA_CLIENT_ID= -KAFKA_PREFIX=local +# Kafka +KAFKA_BROKERS= +KAFKA_TOPIC_PREFIX= +KAFKA_CONSUMER_GROUP= +CLIENT_ID= -CARDANO_NODE_URL= -CARDANO_NETWORK_MAGIC=4 -KUBER_BASE_URL= +# Cardano +CARDANO_NODE_URL=172.31.0.4:3004 +KUBER_BASE_URL='https://sanchonet.kuber.cardanoapi.io' KUBER_API_KEY= +METADATA_BASE_URL='https://metadata.drep.id' +DB_SYNC_BASE_URL= +CARDANO_NETWORK_MAGIC=4 BLOCKFROST_API_KEY= -ENABLE_BLOCKFROST_SUBMIT_API= +ENABLE_BLOCKFROST_SUBMIT_API='True' -DATABASE_URL=postgresql://root:root@localhost:5432/cardano_autonomous_agent_testing_db - +# Wallet MANAGER_WALLET_ADDRESS= MANAGER_WALLET_SIGNING_KEY= - -SANCHONET_FAUCET_API_KEY= 
- +FAUCET_API_KEY= AGENT_MNEMONIC= -METADATA_BASE_URL= -METADATA_FETCH_BASE_URL= - -DB_SYNC_BASE_URL= - -SERVER_PORT= -NETWORK_NAME= - +# Database +DATABASE_URL= -ELASTIC_APM_SERVER_URL=https://apm.sireto.io -ELASTIC_APM_API_KEY=XXX \ No newline at end of file +# Server +SERVER_PORT=3002 +NETWORK_NAME=sanchonet \ No newline at end of file diff --git a/agent-manager/README.md b/agent-manager/README.md index 13ec4b64f..2df5967de 100644 --- a/agent-manager/README.md +++ b/agent-manager/README.md @@ -1,26 +1,25 @@ # Agent Manager Application -This project is a TypeScript Agent Manager application where Agents are connected to it through websocket . +This project is a TypeScript Agent Manager application where agents are connected to it through websocket. ## Table of Contents - [Requirements](#requirements) - [Installation](#installation) -- [Usage](#usage) -- [Development](#development) ## Requirements - [Node.js](https://nodejs.org/) (v18.18.0 or higher) - [yarn](https://yarnpkg.com/) package manager +- `kafka service` +- `postgres server` ## Installation -1. Clone the repository: +1. Go to the agent-manager folder (If in root folder) ```shell - git clone https://github.com/sireto/cardano-autonomous-agent.git - cd cardano-autonomous-agent/agent-manager + cd agent-manager ``` 2. Install dependencies using yarn: @@ -31,42 +30,73 @@ This project is a TypeScript Agent Manager application where Agents are connecte ## Usage -Copy the env variables form `.env.example` to `.env` and update the env variables. +Create new file `.env` and copy env variables form `.env.example` to `.env` and update the env variables. -Make sure to run the following command to generate the database client and creating the required table mentioned in schema +#### Setup environment variables -```bash -yarn prisma generate -``` +#### Kafka Configuration -### Development Mode +- **`KAFKA_CONSUMER_GROUP`**: Kafka consumer group name. +- **`CLIENT_ID`**: Unique client ID for Kafka. 
+- **`KAFKA_PREFIX`**: Prefix for Kafka topics. +- **`KAFKA_BROKERS`**: Kafka broker URL. Specify either a locally running Kafka URL (e.g., on Docker) or a deployed Kafka service URL. -To run the application in dev mode run the following command +#### Cardano Configuration -```shell -yarn dev -``` +- **`CARDANO_NODE_URL`**: `172.31.0.4:3004` - URL for the Cardano node. +- **`KUBER_BASE_URL`**: `'https://sanchonet.kuber.cardanoapi.io'` - Base URL for Kuber's Cardano API. +- **`KUBER_API_KEY`**: API key for accessing Kuber services. Generate an API key from [KuberIde](https://kuberide.com/kuber/settings/api-keys). +- **`METADATA_BASE_URL`**: Metadata URL to fetch information about dReps and proposals across different networks. (Default provided in `.env.example`) +- **`DB_SYNC_BASE_URL`**: URL for the `dbsync-api service`. Default running on `http://localhost:9000` on starting `dbsync-api` service. +- **`CARDANO_NETWORK_MAGIC`**: `4` - Network magic for the Cardano testnet(Sanchonet). +- **`BLOCKFROST_API_KEY`** (Optional): API key for accessing the Blockfrost API. (Required if `ENABLE_BLOCKFROST_SUBMIT_API` is enabled): Obtain from [Blockfrost](https://blockfrost.io/). +- **`ENABLE_BLOCKFROST_SUBMIT_API`** (Optional): `'True'` - Enable or disable Blockfrost transaction submission API. + > **Note:** If `ENABLE_BLOCKFROST_SUBMIT_API` is not enabled, transactions will be submitted using `Kuber`, which may take a few minutes. -### Production Mode +#### Wallet Configuration -To run the Agent Manager application, follow these steps: +- **`MANAGER_WALLET_ADDRESS`** (Optional): Wallet address with sufficient ADA for transfers. +- **`MANAGER_WALLET_SIGNING_KEY`** (Optional): Signing key for the manager wallet. +- **`FAUCET_API_KEY`** (Optional): API key to load ADA for agent transfers if the manager wallet lacks sufficient funds. +- **`AGENT_MNEMONIC`**: Seed phrase used to generate a wallet. -1. 
Build the application using the following command: +#### Database Configuration - ```shell - yarn build - ``` +- **`DATABASE_URL`**: PostgreSQL database URL. Specify either a local Docker-based database or a deployed database URL. - This will compile the TypeScript files into JavaScript and place the output in the `dist` directory. +#### Server Configuration -2. Run the application with an agent ID as a command-line argument: +- **`SERVER_PORT`** (OPTIONAL): `3002` - Port number for the server. (Default port is 3001) +- **`NETWORK_NAME`**: `sanchonet` - Name of the Cardano network. - ```shell - yarn start - ``` +After updating environment variables make sure to run the following command to generate the database client and creating the required table mentioned in schema -Make sure your API service is up and running . +```bash +yarn prisma generate +``` -If successful a server listening on port `3000` will be running: +Now finally run the below command to start the manager: + +```bash +yarn dev +``` + +If successful a server listening on mentioned PORT will be running: > http://localhost:3001 + +## Running in `Preprod` or `Preview` Networks + +To run in `Preprod` or `Preview` networks, update the following environment variables: + +- **`DB_SYNC_BASE_URL`**: + - `https://preprod-dbync.agents.cardanoapi.io/api` for `preprod` + - `https://preview-dbync.agents.cardanoapi.io/api` for `preview` +- **`KUBER_BASE_URL`**: + - `https://preview.kuber.cardanoapi.io` for `preview` + - `https://preprod.kuber.cardanoapi.io` for `preprod` +- **`CARDANO_NETWORK_MAGIC`**: + - `3` for `preview` + - `2` for `preprod` +- **`BLOCKFROST_API_KEY`**: Obtain from [Blockfrost](https://blockfrost.io/) for the desired network. +- **`NETWORK_NAME`**: Set to `preprod` or `preview`. 
diff --git a/agent-manager/src/controller/health.ts b/agent-manager/src/controller/health.ts index 873582b28..1a9b7d2e7 100644 --- a/agent-manager/src/controller/health.ts +++ b/agent-manager/src/controller/health.ts @@ -58,9 +58,9 @@ async function healthCheck(req: Request, res: Response) { database: { isHealthy: isDatabaseHealthy, }, - metadata:{ - isHealthy:isMetadataHealthy - } + metadata: { + isHealthy: isMetadataHealthy, + }, }, }) } catch (err: any) { diff --git a/agent-node/.env.example b/agent-node/.env.example index 974c87176..02499e2f2 100644 --- a/agent-node/.env.example +++ b/agent-node/.env.example @@ -1,4 +1,2 @@ -WS_URL=ws://localhost:3001 -AGENT_SECRET_KEY= -NETWORK= TOKEN= +WS_URL=ws://localhost:3001 \ No newline at end of file diff --git a/agent-node/README.md b/agent-node/README.md index fa9131093..b922b77c2 100644 --- a/agent-node/README.md +++ b/agent-node/README.md @@ -2,61 +2,36 @@ This project is a TypeScript client application that connects to a server via WebSocket and processes configurations sent by the server. It can schedule and trigger functions based on received configurations. -## Table of Contents - -- [Requirements](#requirements) -- [Installation](#installation) -- [Usage](#usage) -- [Development](#development) - ## Requirements - [Node.js](https://nodejs.org/) (v18.18.0 or higher) - [yarn](https://yarnpkg.com/) package manager +- `Agent-Manager` service +- `Fronted` service +- `Backend` service +- `DbSync-api` service ## Installation -1. Clone the repository: - - ```shell - git clone https://github.com/sireto/cardano-autonomous-agent.git - cd cardano-autonomous-agent/agent-node - ``` - -2. Install dependencies using npm or yarn: +1. Install dependencies using npm or yarn: ```shell yarn install ``` -## Usage +2. Create new file `.env` and copy env variables form `.env.example` to `.env` and update the env variables. 
### Setting up environment variables -Copy environment variables from `.env.example` to `.env` and update them as necessary. +- **`TOKEN`**: Run the frontend and visit `My Agent` tab from left bottom section of the page. Then click `Run Agent` button on top right of the `Agent Overview` section. Copy the token part only and paste it in env. +- **`WS_URL`**: `agent-manager` websocket url . Default on `ws://localhost:3001' -> **Note**: AGENT_ID if the ID of the agent created with API. +**`Note`** - Remember to add `ws` as protocol in `WS_URL` instead of `http`. -### Development Mode +Copy environment variables from `.env.example` to `.env` and update them as necessary. -To run the application in dev mode run the following command +Finally run the agent by running below command. ```shell -yarn dev + yarn dev ``` - -### Production Mode - -1. Build the application using the following command: - - ```shell - yarn build - ``` - - This will compile the TypeScript files into JavaScript and place the output in the `dist` directory. - -2. 
Run the application with an agent ID as a command-line argument: - - ```shell - yarn start - ``` diff --git a/agent-node/src/functions/proposalNewConstitution.ts b/agent-node/src/functions/proposalNewConstitution.ts index dfe16ac33..81ed8fbbe 100644 --- a/agent-node/src/functions/proposalNewConstitution.ts +++ b/agent-node/src/functions/proposalNewConstitution.ts @@ -25,17 +25,17 @@ export default async function handler( return await context.wallet .buildAndSubmit(req, true) .then((v) => v) - .catch(async(e) => { + .catch(async (e) => { if (e.includes('ProposalReturnAccountDoesNotExist')) { await context.builtins.registerStake().catch((e) => { throw e }) return context.wallet - .buildAndSubmit(req) - .then((v) => v) - .catch((e) => { - throw e - }) + .buildAndSubmit(req) + .then((v) => v) + .catch((e) => { + throw e + }) } else { throw e } diff --git a/agent-node/src/functions/treasuryWithdrawal.ts b/agent-node/src/functions/treasuryWithdrawal.ts index 6b5fc787d..bf7971268 100644 --- a/agent-node/src/functions/treasuryWithdrawal.ts +++ b/agent-node/src/functions/treasuryWithdrawal.ts @@ -33,17 +33,17 @@ export default async function handler( }, ], } - return await context.wallet.buildAndSubmit(req, false).catch(async(e)=>{ + return await context.wallet.buildAndSubmit(req, false).catch(async (e) => { if (e.includes('ProposalReturnAccountDoesNotExist')) { await context.builtins.registerStake().catch((e) => { throw e }) return context.wallet - .buildAndSubmit(req) - .then((v) => v) - .catch((e) => { - throw e - }) + .buildAndSubmit(req) + .then((v) => v) + .catch((e) => { + throw e + }) } else { throw e } diff --git a/agent-node/src/index.ts b/agent-node/src/index.ts index a9d2c6817..0e61049e4 100644 --- a/agent-node/src/index.ts +++ b/agent-node/src/index.ts @@ -36,11 +36,9 @@ if (!wsUrl) { if (network && managerBaseDomain) { // This is set in docker file wsUrl = `wss://${network.toLowerCase()}.${managerBaseDomain}` - } - else if (managerBaseDomain){ + } else if 
(managerBaseDomain) { wsUrl = `ws://${managerBaseDomain}` - } - else { + } else { wsUrl = 'ws://localhost:3001' } } diff --git a/api/.env.example b/api/.env.example index eb8466153..041a74a35 100644 --- a/api/.env.example +++ b/api/.env.example @@ -1,15 +1,8 @@ DATABASE_URL= -AGENT_MNEMONIC= +DOCS_URL='/api/docs' KAFKA_BROKERS= +AGENT_MNEMONIC= KAFKA_ENABLED=true -DOCS_URL=/api/docs - -KUBER_URL=localhost -KAFKA_PREFIX= - -JWT_SECRET_KEY="" - -METADATA_BASE_URL='' -DB_SYNC_BASE_URL='' - -ELASTIC_APM_SERVER_URL=https://apm.sireto.io +METADATA_BASE_URL='https://metadata.drep.id/api' +DB_SYNC_BASE_URL= +KAFKA_PREFIX= \ No newline at end of file diff --git a/api/README.md b/api/README.md index 943e4ef6f..1cf45e81a 100644 --- a/api/README.md +++ b/api/README.md @@ -13,6 +13,9 @@ Poetry version : 1.8.2 - Python version: `3.12` or higher - Poetry version: `1.8.3` or higher - Pip version: `24.0.0` or higher +- `kafka service` +- `postgres server` + #### Steps @@ -20,20 +23,7 @@ Poetry version : 1.8.2 > > - Postgres (Required) > -> - Kafka with Zookeeper (Required) - -#### Setup environment variables -Make new file `.env` using `.env.example` and update the environments before running the below steps: - -- **`KAFKA_PREFIX`**: Prefix for Kafka topics. -- **`AGENT_MNEMONIC`**: Seed phrase to generate a wallet. -- **`DOCS_URL`**: Path for swagger docs -- **`KAFKA_ENABLED`**: To enable kafka (Must be enabled by putting value `true` to run the testing agents) -- **`METADATA_BASE_URL`**: Metadata url to fetch metadata of the drep and proposals of different network -- **`DB_SYNC_BASE_URL`**: DbSync url -- **`KAFKA_PREFIX`**: Kafka prefix topic -- **`DATABASE_URL`**: Postgres database url -- **`KAFKA_BROKERS`**: Kafka broker url +> - Kafka (Required)
@@ -52,12 +42,12 @@ Make new file `.env` using `.env.example` and update the environments before run > **Note:** Your terminal should have something like this `(backend-py3.12) ` - If it is not created using python of version `3.12` or higher then create virtual environment again using command ```shell - poetry env use 3.12 - ``` + poetry env use 3.12 + ``` - And finally again use command ```shell poetry shell - ``` + ``` 3. Install Dependencies @@ -65,7 +55,19 @@ Make new file `.env` using `.env.example` and update the environments before run poetry install ``` -4. Update the environment variables copying it form `.env.example` to `.env` +4. Make new file `.env` using `.env.example` and update the environments before running the below steps: + +#### Setup environment variables + +- **`KAFKA_PREFIX`**: Prefix for Kafka topics. +- **`AGENT_MNEMONIC`**: Seed phrase to generate a wallet. +- **`DOCS_URL`**: Path for swagger docs +- **`KAFKA_ENABLED`**: To enable kafka (Must be enabled by putting value `true` to run the testing agents) +- **`METADATA_BASE_URL`**: Metadata url to fetch metadata of the drep and proposals of different network. (Default provided in `.env.example`) +- **`DB_SYNC_BASE_URL`**: URL for the `dbsync-api service`. Default running on `http://localhost:9000` on starting `dbsync-api` service. +- **`KAFKA_PREFIX`**: Kafka prefix topic +- **`DATABASE_URL`**: Postgres database url. Specify either a locally running Postgres database URL (e.g., on Docker) or a deployed Postgres database URL. +- **`KAFKA_BROKERS`**: Kafka broker URL. Specify either a locally running Kafka URL (e.g., on Docker) or a deployed Kafka service URL. 5. 
Run this command for generating the database client and creating the required table mentioned in schema diff --git a/dbsync-api/.env.example b/dbsync-api/.env.example index 95abce2b9..017d16871 100644 --- a/dbsync-api/.env.example +++ b/dbsync-api/.env.example @@ -1,4 +1,3 @@ -DATABASE_URL=postgres://sudipbhattarai@localhost/dbsync_sanchonet -NODE_ENV=prod +DATABASE_URL= CORS_ENABLE=true -PORT=3001 \ No newline at end of file +PORT=9000 \ No newline at end of file diff --git a/dbsync-api/README.md b/dbsync-api/README.md new file mode 100644 index 000000000..354d34a04 --- /dev/null +++ b/dbsync-api/README.md @@ -0,0 +1,29 @@ +# dbsync-api Service + +The `dbsync-api` service is a Node.js application designed to provide API access to interact with the `dbsync` database. This service enables efficient data retrieval for Cardano-related operations. + +## Requirements +Before running the service, ensure the following dependencies are installed: +- PostgreSQL server + +## Installation +1. Run this command for package installation + ```shell + yarn install + ``` +2. Create new file `.env` and copy env variables form `.env.example` to `.env` and update the env variables. +- **`DATABASE_URL`**: PostgreSQL database URL for dbsync_sanchonet. For `preprod` and `preview` update database instance accordingly. +- **`PORT`**: Port for running the server. Default value is 8080 +- **`CORS_ENABLE`**: CORS support for cross-origin requests. + +3. Run the following command to generate the database client and creating the required table mentioned in schema + ```bash + yarn prisma generate + ``` + +4. Now finally run the below command to start the `dbsync-api` service: + ```bash + yarn dev + ``` + +Now goto `http://localhost:8080/api/docs` to see list of api in `swaggerDocs`. 
diff --git a/frontend/.env.example b/frontend/.env.example index 548323932..b1b656d47 100644 --- a/frontend/.env.example +++ b/frontend/.env.example @@ -1,4 +1,3 @@ -API_URL = 'http://localhost:8000/api' # internal api url NEXT_PUBLIC_API_URL = # api url accessed from browser NEXT_PUBLIC_NETWORK_NAME = sanchonet # cardano network on which the platform is running NEXT_PUBLIC_ENABLE_AGENT_INSTANCE=false \ No newline at end of file diff --git a/frontend/README.md b/frontend/README.md index f62ea1c87..f600741a1 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -6,7 +6,14 @@ First, install the required dependencies for the project using the following com yarn install ``` -Once the installation is complete, run one of the commands below to start the development server: +Once the installation is complete create new file `.env` and copy env variables form `.env.example` to `.env` and update the env variables. + +- **`NEXT_PUBLIC_NETWORK_NAME`**: Set to`sanchonet` or `preview` or `preprod`. + **`Note`**: It will only display network type in web app. You need to update `agent-manager`, `backend` and `dbsync-api` services to change thw working of functions in other networks. +- - **`NEXT_PUBLIC_ENABLE_AGENT_INSTANCE`**: Enable it by adding `true` to run `multiple instances of single agent` feature where same type of functions will be executed by multiple instance of agent. To use this feature you also need to increase the instance number from `Agent Overview ` section. +- - **`NEXT_PUBLIC_API_URL`**: `Backend ` service url accessed from browser. + +##### Finally run one of the commands below to start the development server: ```bash npm run dev @@ -19,4 +26,3 @@ bun dev ``` Open [http://localhost:3000](http://localhost:3000) in your browser to view the application. 
- diff --git a/frontend/src/components/Agent/RunnerTutorial.tsx b/frontend/src/components/Agent/RunnerTutorial.tsx index ad921507d..c91159850 100644 --- a/frontend/src/components/Agent/RunnerTutorial.tsx +++ b/frontend/src/components/Agent/RunnerTutorial.tsx @@ -1,13 +1,15 @@ import Link from 'next/link'; -import {Copy} from 'lucide-react'; +import { Copy } from 'lucide-react'; -import {SuccessToast} from '@app/components/molecules/CustomToasts'; +import { SuccessToast } from '@app/components/molecules/CustomToasts'; import environments from '@app/configs/environments'; -import {convertToBase64} from '@app/utils/base64converter'; +import { convertToBase64 } from '@app/utils/base64converter'; -const AgentRunnerTutorial = ({agentSecretKey, showToken}: { agentSecretKey: string; showToken?: boolean }) => { - const dockerCommand = environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN ? `docker run -d --network=${environments.NEXT_PUBLIC_DOCKER_NETWORK_NAME || 'autonomous_agent'} -e TOKEN=${convertToBase64(agentSecretKey)} -e MANAGER_BASE_DOMAIN=${environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN} ${environments.NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME||'cardanoapi/autonomous-agents:Dev'}` : `docker run -d --pull always -e TOKEN=${convertToBase64(agentSecretKey)} cardanoapi/autonomous-agents:${environments.NEXT_PUBLIC_IMAGE_TAG}`; +const AgentRunnerTutorial = ({ agentSecretKey, showToken }: { agentSecretKey: string; showToken?: boolean }) => { + const dockerCommand = environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN + ? `docker run -d --network=${environments.NEXT_PUBLIC_DOCKER_NETWORK_NAME || 'autonomous_agent'} -e TOKEN=${convertToBase64(agentSecretKey)} -e MANAGER_BASE_DOMAIN=${environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN} ${environments.NEXT_PUBLIC_AGENT_NODE_DOCKER_IMAGE_NAME || 'cardanoapi/autonomous-agents:Dev'}` + : `docker run -d --pull always -e TOKEN=${convertToBase64(agentSecretKey)} cardanoapi/autonomous-agents:${environments.NEXT_PUBLIC_IMAGE_TAG}`; return (
diff --git a/frontend/src/utils/base64converter.ts b/frontend/src/utils/base64converter.ts index e5d22c205..64396b632 100644 --- a/frontend/src/utils/base64converter.ts +++ b/frontend/src/utils/base64converter.ts @@ -1,7 +1,9 @@ import environments from '@app/configs/environments'; export function convertToBase64(agentSecretKey: string) { - const newSecretKey = environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN ? '_' + agentSecretKey : environments.network + '_' + agentSecretKey; + const newSecretKey = environments.NEXT_PUBLIC_MANAGER_BASE_DOMAIN + ? '_' + agentSecretKey + : environments.network + '_' + agentSecretKey; const buffer = new Buffer(newSecretKey); return buffer.toString('base64'); }