Skip to content

Commit ba6133a

Browse files
committed
fix: Update docker compose for readme
1 parent 737887c commit ba6133a

File tree

11 files changed

+158
-91
lines changed

11 files changed

+158
-91
lines changed

README.md

Lines changed: 56 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,62 @@ and align with the decentralized governance objectives of Cardano's Voltaire era
2020

2121
### Using Docker
2222

23-
Setup the required dependencies by running the command locally.
23+
Before running the whole service locally using Docker, you need to add a few environment variables
24+
on file `docker-compose.dev.yml`
25+
26+
##### api
27+
- AGENT_MNEMONIC
28+
- Add a seed phrase to generate the wallet
29+
30+
##### agent_manager
31+
- KUBER_API_KEY
32+
- Visit [KuberIde](https://kuberide.com/kuber/settings/api-keys) and generate api-key
33+
- MANAGER_WALLET_ADDRESS (OPTIONAL)
34+
- MANAGER_WALLET_SIGNING_KEY (OPTIONAL)
35+
- Add a wallet address having sufficient ADA so that it can be used to transfer ADA to agent when requested
36+
- FAUCET_API_KEY (OPTIONAL)
37+
- Add a faucet API key to load ADA, which will be used to transfer ADA to agents on request. It will only be used if the provided `MANAGER_WALLET_ADDRESS` does not have sufficient ADA.
38+
- AGENT_MNEMONIC
39+
- Add a seed phrase to generate the wallet; it must be the same as the one added in `api`
40+
- BLOCKFROST_API_KEY (Required if ENABLE_BLOCKFROST_SUBMIT_API is 'True' or enabled)
41+
- Visit [Blockfrost](https://blockfrost.io/) and sign up and generate api key
42+
43+
Note: enabling the environment variable `ENABLE_BLOCKFROST_SUBMIT_API` is preferred; if it is not enabled, `Kuber` will be used to submit the transaction, which might take a couple of minutes.
44+
45+
##### dbsync
46+
- DATABASE_URL
47+
- Add database url of dbsync
48+
49+
Furthermore, all environment variables are set up to run on `Sanchonet`, so if you want to run on the `Preprod` or `Preview`
50+
network, then the following environment variables need to be updated:
51+
52+
##### frontend
53+
- NEXT_PUBLIC_NETWORK_NAME
54+
- preview or preprod
55+
56+
##### api and manager
57+
- DB_SYNC_BASE_URL
58+
- https://preprod-dbync.agents.cardanoapi.io/api for `preprod`
59+
- https://preview-dbync.agents.cardanoapi.io/api for `preview`
60+
61+
##### manager only
62+
- KUBER_BASE_URL
63+
- https://preview.kuber.cardanoapi.io for `preview`
64+
- https://preprod.kuber.cardanoapi.io for `preprod`
65+
66+
- CARDANO_NETWORK_MAGIC
67+
- 3 for `preview`
68+
- 2 for `preprod`
69+
70+
- BLOCKFROST_API_KEY
71+
- Visit [Blockfrost](https://blockfrost.io/) and sign up and generate api key based on desired network type
72+
73+
- NETWORK_NAME
74+
- preprod or preview
75+
76+
##### dbsync
77+
- DATABASE_URL
78+
- Update the dbsync database url and database name accordingly
2479

2580
```shell
2681
docker compose -f docker-compose.dev.yml up -d

api/.env.example

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ KAFKA_PREFIX=
1515

1616
JWT_SECRET_KEY=""
1717

18-
METADATA_API=''
19-
DB_SYNC_API=''
18+
METADATA_BASE_URL=''
19+
DB_SYNC_BASE_URL=''
2020

2121
ELASTIC_APM_SERVER_URL=https://apm.sireto.io

api/backend/app/controllers/health.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ async def readiness_check():
7373

7474
async def dbsync_health_check():
7575
async with aiohttp.ClientSession() as session:
76-
async with session.get(api_settings.DB_SYNC_API + "/health") as response:
76+
async with session.get(api_settings.DB_SYNC_BASE_URL + "/health") as response:
7777
if response.status == 200:
7878
return True
7979
return False

api/backend/app/controllers/internal/metadata_router.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,13 +9,13 @@ class MetadataRouter(Routable):
99

1010
def __init__(self):
1111
super().__init__()
12-
self.metadata_api = APISettings().METADATA_API
12+
self.metadata_base_url = APISettings().METADATA_BASE_URL
1313

1414
@get("/metadata")
1515
async def fetch_metadata(self, metadata_url: str):
1616
async with aiohttp.ClientSession() as session:
1717
async with session.get(
18-
f"{self.metadata_api}/metadata?url={metadata_url}&hash=1111111111111111111111111111111111111111111111111111111111111112"
18+
f"{self.metadata_base_url}/metadata?url={metadata_url}&hash=1111111111111111111111111111111111111111111111111111111111111112"
1919
) as resp:
2020
response = await resp.json()
2121
if resp.ok:

api/backend/app/services/agent_service.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -205,7 +205,7 @@ async def fetch_data(self, url, session: ClientSession):
205205
raise HTTPException(status_code=400, content="Error fetching agent Drep details")
206206

207207
async def fetch_balance(self, stake_address: str, session: ClientSession):
208-
async with session.get(f"{api_settings.DB_SYNC_API}/address/balance?address={stake_address}") as response:
208+
async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/address/balance?address={stake_address}") as response:
209209
try:
210210
return await response.json()
211211
except:
@@ -215,7 +215,7 @@ async def fetch_balance(self, stake_address: str, session: ClientSession):
215215
)
216216

217217
async def fetch_drep_details(self, drep_id: str, session: ClientSession) -> Dict[str, float | bool]:
218-
async with session.get(f"{api_settings.DB_SYNC_API}/drep/{drep_id}") as response:
218+
async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/drep/{drep_id}") as response:
219219
try:
220220
res = await response.json()
221221
voting_power = res.get("votingPower") / (10**6) if res.get("votingPower") else 0
@@ -228,7 +228,7 @@ async def fetch_drep_details(self, drep_id: str, session: ClientSession) -> Dict
228228
)
229229

230230
async def fetch_stake_address_details(self, stake_address: str, session: ClientSession):
231-
async with session.get(f"{api_settings.DB_SYNC_API}/stake-address?address={stake_address}") as response:
231+
async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/stake-address?address={stake_address}") as response:
232232
try:
233233
is_stake_registered = False
234234
res = await response.json()
@@ -250,7 +250,7 @@ async def fetch_stake_address_details(self, stake_address: str, session: ClientS
250250
)
251251

252252
async def fetch_delegation_details(self, stake_address: str, session: ClientSession):
253-
async with session.get(f"{api_settings.DB_SYNC_API}/delegation?address={stake_address}") as response:
253+
async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/delegation?address={stake_address}") as response:
254254
try:
255255
res = await response.json()
256256
drep_id = res.get("drep", {}).get("drep_id") if res.get("drep") else None

api/backend/app/services/drep_service.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ async def fetch_internal_dreps(self, page: int, page_size: int, search: str | No
5858
async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], session: ClientSession):
5959
drep_dict = {}
6060
drep_id = convert_base64_to_hex(agent.wallet_details[0].stake_key_hash)
61-
async with session.get(f"{api_settings.DB_SYNC_API}/drep?search={drep_id}") as response:
61+
async with session.get(f"{api_settings.DB_SYNC_BASE_URL}/drep?search={drep_id}") as response:
6262
response_json = await response.json()
6363
if response_json["items"]:
6464
if drep_id == response_json["items"][0]["drepId"]:
@@ -68,7 +68,7 @@ async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], sess
6868
metadata_hash = drep_dict.get("metadataHash")
6969
try:
7070
async with session.get(
71-
f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}"
71+
f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}"
7272
) as metadata_resp:
7373
metadata_resp_json = await metadata_resp.json()
7474
if "hash" in metadata_resp_json:
@@ -84,9 +84,9 @@ async def fetch_metadata(self, agent: Agent, index: int, agents: list[Any], sess
8484

8585
async def fetch_external_dreps(self, page: int, page_size: int, search: str | None):
8686
if search:
87-
fetchUrl = f"{api_settings.DB_SYNC_API}/drep?search={search}"
87+
fetchUrl = f"{api_settings.DB_SYNC_BASE_URL}/drep?search={search}"
8888
else:
89-
fetchUrl = f"{api_settings.DB_SYNC_API}/drep?page={page}&size={page_size}"
89+
fetchUrl = f"{api_settings.DB_SYNC_BASE_URL}/drep?page={page}&size={page_size}"
9090

9191
async with aiohttp.ClientSession() as session:
9292
async with session.get(fetchUrl) as response:
@@ -129,7 +129,7 @@ async def fetch_metadata_for_drep(self, metadata_hash: str, url: str, drep: Any)
129129
try:
130130
async with aiohttp.ClientSession() as session:
131131
async with session.get(
132-
f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}"
132+
f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}"
133133
) as metadata_resp:
134134
metadata_resp_json = await metadata_resp.json()
135135
if "hash" in metadata_resp_json:

api/backend/app/services/proposal_service.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ async def get_internal_proposals(self, page: int = 1, pageSize: int = 10, search
6666
async def add_metadata_and_agent_detail_in_internal_proposal(
6767
self, proposal: TriggerHistoryDto, index: int, results: list[Any]
6868
):
69-
url = f"{api_settings.DB_SYNC_API}/proposal?proposal={proposal.txHash}"
69+
url = f"{api_settings.DB_SYNC_BASE_URL}/proposal?proposal={proposal.txHash}"
7070
proposal_data = await self._fetch_proposal_data(url)
7171
if not proposal_data:
7272
results[index] = ""
@@ -83,9 +83,9 @@ async def add_metadata_and_agent_detail_in_internal_proposal(
8383
results[index] = proposal_dict
8484

8585
async def get_external_proposals(self, page: int, pageSize: int, sort: str, search: str | None = None):
86-
search_url = f"{api_settings.DB_SYNC_API}/proposal?page={page}&size={pageSize}&sort={sort}"
86+
search_url = f"{api_settings.DB_SYNC_BASE_URL}/proposal?page={page}&size={pageSize}&sort={sort}"
8787
if search:
88-
search_url = f"{api_settings.DB_SYNC_API}/proposal?proposal={search}"
88+
search_url = f"{api_settings.DB_SYNC_BASE_URL}/proposal?proposal={search}"
8989

9090
async with aiohttp.ClientSession() as session:
9191
async with session.get(search_url) as response:
@@ -126,7 +126,7 @@ async def _fetch_metadata(self, metadata_hash: str, url: str, proposal_dict: Any
126126
try:
127127
async with aiohttp.ClientSession() as session:
128128
async with session.get(
129-
f"{api_settings.METADATA_API}/metadata?url={url}&hash={metadata_hash}"
129+
f"{api_settings.METADATA_BASE_URL}/metadata?url={url}&hash={metadata_hash}"
130130
) as metadata_resp:
131131
metadata_resp_json = await metadata_resp.json()
132132
if "hash" in metadata_resp_json:

api/backend/config/api_settings.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,9 @@ class APISettings(BaseSettings):
55
APP_ENV: str = "production"
66
SECURE: bool = None
77
JWT_SECRET_KEY: str = ""
8-
DB_SYNC_API: str = "https://dbsyncapi.agents.cardanoapi.io/api"
8+
DB_SYNC_BASE_URL: str = "https://dbsyncapi.agents.cardanoapi.io/api"
99
SAME_SITE = "None"
10-
METADATA_API: str = "https://metadata.drep.id/api"
10+
METADATA_BASE_URL: str = "https://metadata.drep.id/api"
1111
GOV_ACTION_API: str = "https://govtool.cardanoapi.io/api"
1212
KAFKA_TOPIC_PREFIX: str = ""
1313
KAFKA_PREFIX: str = ""

docker-compose.dev.yml

Lines changed: 19 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ services:
2525
- DOCS_URL=/api/docs
2626
- AGENT_MNEMONIC='group shadow belt culture garage sport demand target twist tribe milk dumb divide mango bench fantasy okay unfair error twice struggle naive squirrel romance'
2727
- KAFKA_ENABLED='True'
28-
- METADATA_API='https://metadata.drep.id/api'
29-
- DB_SYNC_API='https://dbsyncapi.agents.cardanoapi.io/api'
28+
- METADATA_BASE_URL='https://metadata.drep.id/api'
29+
- DB_SYNC_BASE_URL='http://dbsync:9000/api'
3030
depends_on:
3131
- postgres
3232
- kafka
@@ -49,10 +49,10 @@ services:
4949
- CLIENT_ID=my-app
5050
- CARDANO_NODE_URL=172.31.0.4:3004
5151
- KUBER_BASE_URL='https://sanchonet.kuber.cardanoapi.io'
52-
- KUBER_API_KEY='l3HSdpCx86BOZFEvHspmYZ8KiVOnnvJDLKkC7JMVKUu9pZlV0Ld0qbn3pTQ3ER'
53-
- MANAGER_WALLET_ADDRESS=addr_test1qrd3hs7rlxwwdzthe6hj026dmyt3y0heuulctscyydh2kguh4xfmpjqkd25vfq69hcvj27jqyk4hvnyxu7vma2c4kvps8eh2m3
54-
- MANAGER_WALLET_SIGNING_KEY=addr_sk14wrrctnv9cyr05vjnwrjcs7mujzuxf4zj5nm7fna5nx7fxrnrupqwhwycx
55-
- FAUCET_API_KEY=ayEO6dlVF18oTslr9eIMUNgE35GqVfFz
52+
- KUBER_API_KEY=l3HSdpCx86BOZFEvHspmYZ8KiVOnnvJDLKkC7JMVKUu9pZlV0Ld0qbn3pTQ3ER
53+
- MANAGER_WALLET_ADDRESS=
54+
- MANAGER_WALLET_SIGNING_KEY=
55+
- FAUCET_API_KEY=
5656
- AGENT_MNEMONIC='group shadow belt culture garage sport demand target twist tribe milk dumb divide mango bench fantasy okay unfair error twice struggle naive squirrel romance'
5757
- METADATA_BASE_URL='https://metadata.drep.id'
5858
- DB_SYNC_BASE_URL='https://dbsyncapi.agents.cardanoapi.io/api'
@@ -62,7 +62,19 @@ services:
6262
- SERVER_PORT=3002
6363
- NETWORK_NAME='sanchonet'
6464

65-
# #Agent (build image)
65+
# DbSync
66+
dbsync:
67+
build:
68+
context: ./dbsync-api
69+
dockerfile: Dockerfile
70+
ports:
71+
- "9000:9000"
72+
environment:
73+
- PORT=9000
74+
- CORS_ENABLE=true
75+
- DATABASE_URL=postgres://dbsync:9PW%fj36ozwm^8u@mVwoh!uE&[email protected]:8433/dbsync_sanchonet
76+
77+
# #Agent (build image)
6678
# agent:
6779
# build:
6880
# context: ./agent-node

docker-compose.local.yml

Lines changed: 60 additions & 60 deletions
Original file line numberDiff line numberDiff line change
@@ -18,66 +18,66 @@ services:
1818
- cardano-autonomous-backend
1919

2020
# #Pgadmin
21-
# pgadmin:
22-
# image: dpage/pgadmin4:8.2
23-
# container_name: pg-admin
24-
# environment:
25-
# PGADMIN_DEFAULT_EMAIL: [email protected]
26-
# PGADMIN_DEFAULT_PASSWORD: password
27-
# ports:
28-
# - "5050:80"
29-
# networks:
30-
# - cardano-autonomous-backend
31-
#
32-
##Zookeeper
33-
# zookeeper:
34-
# image: confluentinc/cp-zookeeper:7.0.1
35-
# container_name: zookeeper_agent_autonomous
36-
# ports:
37-
# - "2181:2181"
38-
# environment:
39-
# ZOOKEEPER_CLIENT_PORT: 2181
40-
# ZOOKEEPER_TICK_TIME: 2000
41-
# ZOOKEEPER_SYNC_LIMIT: 2
42-
# networks:
43-
# - cardano-autonomous-backend
44-
#
45-
# #Kafka
46-
# kafka:
47-
# image: confluentinc/cp-kafka:7.0.1
48-
# hostname: kafka
49-
# container_name: kafka_agent_autonomous
50-
# depends_on:
51-
# - zookeeper
52-
# ports:
53-
# - "9092:9092"
54-
# environment:
55-
# KAFKA_BROKER_ID: 1
56-
# KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
57-
# KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT
58-
# KAFKA_ADVERTISED_LISTENERS: DOCKER_NET://kafka:9093,HOST_NET://localhost:9092
59-
# KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER_NET
60-
# KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
61-
# KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
62-
# KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
63-
# KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
64-
# KAFKA_JMX_PORT: 9999
65-
# KAFKA_CREATE_TOPICS: "trigger_config_updates:1:1"
66-
# networks:
67-
# - cardano-autonomous-backend
68-
#
69-
##Kafka UI
70-
# kafka-ui:
71-
# image: provectuslabs/kafka-ui
72-
# container_name: kafka-ui-agent-autonomous
73-
# ports:
74-
# - "8080:8080"
75-
# restart: always
76-
# environment:
77-
# - KAFKA_CLUSTERS_0_NAME=local
78-
# - KAFKA_CLUSTERS_0_BOOTSTRAP_SERVERS=kafka:9093
79-
# networks:
80-
# - cardano-autonomous-backend
21+
pgadmin:
22+
image: dpage/pgadmin4:8.2
23+
container_name: pg-admin
24+
environment:
25+
PGADMIN_DEFAULT_EMAIL: [email protected]
26+
PGADMIN_DEFAULT_PASSWORD: password
27+
ports:
28+
- "5050:80"
29+
networks:
30+
- cardano-autonomous-backend
31+
32+
#Zookeeper
33+
zookeeper:
34+
image: confluentinc/cp-zookeeper:7.0.1
35+
container_name: zookeeper_agent_autonomous
36+
ports:
37+
- "2181:2181"
38+
environment:
39+
ZOOKEEPER_CLIENT_PORT: 2181
40+
ZOOKEEPER_TICK_TIME: 2000
41+
ZOOKEEPER_SYNC_LIMIT: 2
42+
networks:
43+
- cardano-autonomous-backend
44+
45+
#Kafka
46+
kafka:
47+
image: confluentinc/cp-kafka:7.0.1
48+
hostname: kafka
49+
container_name: kafka_agent_autonomous
50+
depends_on:
51+
- zookeeper
52+
ports:
53+
- "9092:9092"
54+
environment:
55+
KAFKA_BROKER_ID: 1
56+
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
57+
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER_NET:PLAINTEXT,HOST_NET:PLAINTEXT
58+
KAFKA_ADVERTISED_LISTENERS: DOCKER_NET://kafka:9093,HOST_NET://localhost:9092
59+
KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER_NET
60+
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
61+
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
62+
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
63+
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
64+
KAFKA_JMX_PORT: 9999
65+
KAFKA_CREATE_TOPICS: "trigger_config_updates:1:1"
66+
networks:
67+
- cardano-autonomous-backend
68+
69+
#Kafka UI
70+
kafka-ui:
71+
image: provectuslabs/kafka-ui
72+
container_name: kafka-ui-agent-autonomous
73+
ports:
74+
- "8080:8080"
75+
restart: always
76+
environment:
77+
- KAFKA_CLUSTERS_0_NAME=local
78+
- KAFKA_CLUSTERS_0_BOOTSTRAP_SERVERS=kafka:9093
79+
networks:
80+
- cardano-autonomous-backend
8181

8282
networks:
8383
cardano-autonomous-backend:

0 commit comments

Comments
 (0)