From 41fdf96b5bccafa1ea491f59407cabb566635f4f Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Tue, 16 Sep 2025 08:55:36 +0100 Subject: [PATCH 01/22] change applications project to actual grantpicks project --- grantpicks/api.py | 12 ++++-------- grantpicks/serializers.py | 8 +++++++- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/grantpicks/api.py b/grantpicks/api.py index 5f5005d..25b6d5c 100644 --- a/grantpicks/api.py +++ b/grantpicks/api.py @@ -83,7 +83,6 @@ class RoundsListAPI(APIView, CustomSizePageNumberPagination): 500: OpenApiResponse(description="Internal server error"), }, ) - @method_decorator(cache_page(60 * 1)) def get(self, request: Request, *args, **kwargs): account_id = kwargs.get("account_id") if account_id: @@ -136,11 +135,10 @@ class RoundDetailAPI(APIView): 404: OpenApiResponse(description="Round not found"), }, ) - @method_decorator(cache_page(60 * 1)) def get(self, request: Request, *args, **kwargs): round_id = kwargs.get("round_id") try: - round = Round.objects.get(id=round_id) + round = Round.objects.get(on_chain_id=round_id) except Round.DoesNotExist: return Response({"message": f"Round with ID {round_id} not found."}, status=404) serializer = RoundSerializer(round) @@ -171,11 +169,10 @@ class RoundApplicationsAPI(APIView, CustomSizePageNumberPagination): 404: OpenApiResponse(description="Round not found"), }, ) - @method_decorator(cache_page(60 * 1)) def get(self, request: Request, *args, **kwargs): round_id = kwargs.get("round_id") try: - round = Round.objects.get(id=round_id) + round = Round.objects.get(on_chain_id=round_id) except Round.DoesNotExist: return Response({"message": f"Round with ID {round_id} not found."}, status=404) @@ -210,12 +207,11 @@ class ProjectRoundVotesAPI(APIView, CustomSizePageNumberPagination): 404: OpenApiResponse(description="Round or project not found"), }, ) - @method_decorator(cache_page(60 * 1)) def get(self, request: Request, *args, **kwargs): round_id = kwargs.get("round_id") 
project_id = kwargs.get("project_id") # Get project_id from kwargs try: - round_obj = Round.objects.get(id=round_id) + round_obj = Round.objects.get(on_chain_id=round_id) # project = Project.objects.get(id=project_id) # comment out now, might use later if decide to return vote pairs instead except Round.DoesNotExist: return Response({"message": f"Round with ID {round_id} not found."}, status=404) @@ -278,7 +274,7 @@ class ProjectListAPI(APIView, CustomSizePageNumberPagination): 500: OpenApiResponse(description="Internal server error"), }, ) - @method_decorator(cache_page(60 * 5)) + @method_decorator(cache_page(60 * 2)) def get(self, request: Request, *args, **kwargs): projects = Project.objects.all() status_param = request.query_params.get("status") diff --git a/grantpicks/serializers.py b/grantpicks/serializers.py index 3a84df3..88d95d2 100644 --- a/grantpicks/serializers.py +++ b/grantpicks/serializers.py @@ -130,6 +130,7 @@ class Meta: class RoundApplicationSerializer(ModelSerializer): + project = serializers.SerializerMethodField() class Meta: model = PotApplication @@ -149,7 +150,12 @@ class Meta: reviews = ApplicationReviewSerializer(many=True) round = RoundSerializer() applicant = AccountSerializer() - project = AccountSerializer() + + def get_project(self, obj): + if obj.project: + round_project = Project.objects.filter(owner=obj.project.id).first() + return ProjectSerializer(round_project).data + return None SIMPLE_PROJECT_EXAMPLE = { From e651c612f186b15f081dcafd5cf8815706c744cb Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Tue, 16 Sep 2025 13:59:11 +0100 Subject: [PATCH 02/22] create application for approved projects by owner --- indexer_app/tasks.py | 3 +-- indexer_app/utils.py | 22 +++++++++++++++++++++- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index c279da5..9aba52d 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -415,12 +415,11 @@ def process_stellar_events(): 
event.processed = create_round_application(event_data, event.transaction_hash) - elif event_name == 'u_app': # application review and aproval event.processed = update_application(event_data, event.transaction_hash) elif event_name == 'u_ap': - event.processed = update_approved_projects(event_data) + event.processed = update_approved_projects(event_data, time_stamp=event.ingested_at, tx_hash=event.transaction_hash) elif event_name == 'c_depo': diff --git a/indexer_app/utils.py b/indexer_app/utils.py index eb72088..e6a59a2 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -1346,7 +1346,7 @@ def get_ledger_sequence() -> int: return record.block_height -def update_approved_projects(event_data, chain_id="stellar"): +def update_approved_projects(event_data, chain_id="stellar", time_stamp=None, tx_hash=None): round_id, project_ids = event_data[0], event_data[1] with transaction.atomic(): @@ -1356,6 +1356,22 @@ def update_approved_projects(event_data, chain_id="stellar"): for ids in project_ids: project = Project.objects.get(on_chain_id=ids) round_obj.approved_projects.add(project.owner) + logger.info(f"Creating application for round: {round_id} for approved projects") + status = PotApplicationStatus['Approved'.upper()] + + appl_defaults = { + "message": "added by owner", + "submitted_at": time_stamp or datetime.now(), + "status": status, + "tx_hash": tx_hash, + } + + PotApplication.objects.update_or_create( + applicant=project.owner, + round=round_obj, + project=project.owner, + defaults=appl_defaults, + ) return True except Exception as e: @@ -1683,6 +1699,10 @@ def create_or_update_round(event_data, contract_id, timestamp, chain_id="stellar } ) + for admin_address in event_data.get('admins', []): + admin, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=admin_address) + round_obj.admins.add(admin) + # Create contacts for the round for contact in event_data.get('contacts', []): contact_obj, created = ProjectContact.objects.update_or_create( 
From 2e193ab9d67b711c41c4fcf36635811a13abb51d Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Tue, 23 Sep 2025 12:56:43 +0100 Subject: [PATCH 03/22] index lists on stellar --- .gitignore | 1 + base/celery.py | 30 ++-- base/settings.py | 3 +- indexer_app/tasks.py | 30 +++- indexer_app/utils.py | 144 +++++++++++++++++- lists/api.py | 17 +++ lists/migrations/0008_list_chain.py | 27 ++++ .../0009_alter_list_on_chain_id_and_more.py | 28 ++++ lists/models.py | 19 ++- 9 files changed, 274 insertions(+), 25 deletions(-) create mode 100644 lists/migrations/0008_list_chain.py create mode 100644 lists/migrations/0009_alter_list_on_chain_id_and_more.py diff --git a/.gitignore b/.gitignore index 923869c..bef67d0 100644 --- a/.gitignore +++ b/.gitignore @@ -65,6 +65,7 @@ media/ # Celery beat schedule file celerybeat-schedule dump.rdb +celerybeat-schedule.db # Flask stuff: instance/ diff --git a/base/celery.py b/base/celery.py index 9ba059d..fdc2044 100644 --- a/base/celery.py +++ b/base/celery.py @@ -25,21 +25,21 @@ app.autodiscover_tasks() app.conf.beat_schedule = { - "update_account_statistics_every_5_minutes": { - "task": "indexer_app.tasks.update_account_statistics", - "schedule": crontab(minute="*/5"), # Executes every 5 minutes - "options": {"queue": "beat_tasks"}, - }, - "fetch_usd_prices_every_5_minutes": { - "task": "indexer_app.tasks.fetch_usd_prices", - "schedule": crontab(minute="*/5"), # Executes every 5 minutes - "options": {"queue": "beat_tasks"}, - }, - "update_pot_statistics_every_5_minutes": { - "task": "indexer_app.tasks.update_pot_statistics", - "schedule": crontab(minute="*/5"), # Executes every 5 minutes - "options": {"queue": "beat_tasks"}, - }, + # "update_account_statistics_every_5_minutes": { + # "task": "indexer_app.tasks.update_account_statistics", + # "schedule": crontab(minute="*/5"), # Executes every 5 minutes + # "options": {"queue": "beat_tasks"}, + # }, + # "fetch_usd_prices_every_5_minutes": { + # "task": 
"indexer_app.tasks.fetch_usd_prices", + # "schedule": crontab(minute="*/5"), # Executes every 5 minutes + # "options": {"queue": "beat_tasks"}, + # }, + # "update_pot_statistics_every_5_minutes": { + # "task": "indexer_app.tasks.update_pot_statistics", + # "schedule": crontab(minute="*/5"), # Executes every 5 minutes + # "options": {"queue": "beat_tasks"}, + # }, "fetch_stellar_events_every_minute": { "task": "indexer_app.tasks.stellar_event_indexer", "schedule": crontab(minute="*/1"), # Executes every 1 minutes diff --git a/base/settings.py b/base/settings.py index 06ccd0c..5affa2d 100644 --- a/base/settings.py +++ b/base/settings.py @@ -70,6 +70,7 @@ NADABOT_TLA = "nadabot.testnet" if ENVIRONMENT == "testnet" else ("staging.nadabot.near" if ENVIRONMENT == "dev" else "nadabot.near") STELLAR_CONTRACT_ID = os.environ.get("PL_STELLAR_CONTRACT_ID", "") STELLAR_PROJECTS_REGISTRY_CONTRACT = os.environ.get("PL_STELLAR_PROJECTS_REGISTRY_CONTRACT", "") +STELLAR_LIST_CONTRACT = os.environ.get("PL_STELLAR_LIST_CONTRACT", "") NEAR_SOCIAL_CONTRACT_ADDRESS = ( "v1.social08.testnet" if ENVIRONMENT == "testnet" else "social.near" ) @@ -93,7 +94,7 @@ STELLAR_RPC_URL = ( "https://soroban-testnet.stellar.org" - if ENVIRONMENT == "testnet" + if ENVIRONMENT == "local" else "https://stellar-soroban-public.nodies.app" ) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 9aba52d..064565c 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -25,7 +25,13 @@ from pots.models import Pot, PotApplication, PotApplicationStatus, PotPayout from .logging import logger -from .utils import create_or_update_round, create_round_application, create_round_payout, get_block_height, get_ledger_sequence, process_application_to_round, process_project_event, process_rounds_deposit_event, process_vote_event, save_block_height, update_application, update_approved_projects, update_ledger_sequence, update_round_payout +from .utils import ( + create_or_update_round, 
+# Todo: Change model so that the event indexer saves the event and queues a task to immediately process the event, +# so we don't have a separate beat that's looping through
contracts: {contract_ids}") @@ -433,6 +445,14 @@ def process_stellar_events(): elif event_name == "u_pay": event.processed = update_round_payout(event_data, event.transaction_hash) + elif event_name == "c_list": + event.processed = handle_stellar_list(event_data, event.contract_id, event.ingested_at) + elif event_name == "u_list": + event.processed = handle_stellar_list_update(event_data, event.contract_id, event.ingested_at) + elif event_name == "c_reg": + event.processed = handle_stellar_list(event_data, event.contract_id, event.transaction_hash) + elif event_name == "u_adm": + event.processed = handle_stellar_list_admin_ops(event_data, event.contract_id, event.ingested_at, event.transaction_hash) event.save() except Exception as e: diff --git a/indexer_app/utils.py b/indexer_app/utils.py index e6a59a2..b356e2c 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -906,7 +906,7 @@ async def handle_list_admin_ops(data, receiver_id, signer_id, receiptId): } activity, activity_created = await Activity.objects.aupdate_or_create( - type="List_Admin_Ops", defaults=activity + type="Add_List_Admin", defaults=activity ) except Exception as e: logger.error(f"Failed to remove list admin, Error: {e}") @@ -1612,7 +1612,7 @@ def process_project_event(event_data, chain_id="stellar"): project.admins.add(admin) # Associate team members - for team_member_data in project_data['team_members']: + for team_member_data in project_data.get('team_members', []): team_member, _ = Account.objects.get_or_create(id=team_member_data['value']) project.team_members.add(team_member) @@ -1888,6 +1888,146 @@ def update_round_payout(event_data, tx_hash, chain_id="stellar"): logger.error(f"Error updating Payout. 
{str(e)}") return False + + + +def handle_stellar_list(data, contract_id, timestamp, chain_id="stellar"): + # receipt = block.receipts().filter(receiptId=receiptId)[0] + try: + logger.info("upserting involveed accts...") + + owner_address = data.get('owner') + chain = Chain.objects.get(name=chain_id) + Account.objects.get_or_create(defaults={"chain":chain},id=owner_address) + + + logger.info(f"creating list..... {data}") + + listObject = List.objects.create( + on_chain_id=data["id"], + chain=chain, + owner_id=data["owner"], + default_registration_status=data["default_registration_status"][0], + name=data["name"], + description=data["description"], + cover_image_url=data["cover_img_url"], + admin_only_registrations=data["admin_only_registrations"], + created_at=datetime.fromtimestamp(data["created_ms"] / 1000), + updated_at=datetime.fromtimestamp(data["updated_ms"] / 1000), + ) + + if data.get("admins"): + for admin_id in data["admins"]: + admin_object, _ = Account.objects.get_or_create(defaults={"chain":chain}, + id=admin_id, + ) + listObject.admins.add(admin_object) + logger.info(f"created list for chain {chain.name}.....") + return True + except Exception as e: + logger.error(f"Failed to handle new list, Error: {e}") + return False + + +def handle_stellar_list_update(data, contract_id, timestamp, chain_id="stellar"): + try: + logger.info(f"updating list from result..... 
{data}") + + listObject = List.objects.filter(on_chain_id=data["id"]).update( + owner_id=data["owner"], + default_registration_status=data["default_registration_status"][0], + name=data["name"], + description=data["description"], + cover_image_url=data["cover_image_url"], + admin_only_registrations=data["admin_only_registrations"], + created_at=datetime.fromtimestamp(data["created_at"] / 1000), + updated_at=datetime.fromtimestamp(data["updated_at"] / 1000), + ) + return True + except Exception as e: + logger.error(f"Failed to handle list update, Error: {e}") + return False + + +def handle_new_stellar_list_registration(data, contract_id, tx_hash, chain_id="stellar"): + logger.info(f"new Project data: {data}") + # Prepare data for insertion + chain = Chain.objects.get(name=chain_id) + parent_list = List.objects.get(on_chain_id=data["list_id"]) + try: + project = Account.objects.get_or_create({"chain":chain, "id": data["registrant_id"]}) + except Exception as e: + logger.error(f"Encountered error trying to get create acct: {e}") + + logger.info(f"creating new List registration") + + try: + _ = ListRegistration.objects.create( + **{ + "id": data["id"], + "registrant_id": data["registrant_id"], + "list_id": parent_list.id, + "status": data["status"], + "submitted_at": datetime.fromtimestamp(data["submitted_ms"] / 1000), + "updated_at": datetime.fromtimestamp(data["updated_ms"] / 1000), + "registered_by_id": data["registered_by"], + "admin_notes": data.get("admin_notes"), + "registrant_notes": data.get("registrant_notes"), + "tx_hash": tx_hash, + } + ) + except Exception as e: + logger.error(f"Encountered error trying to create list: {e}") + + # Insert activity + try: + defaults = { + "signer_id": data["registered_by"], + "receiver_id": contract_id, + "timestamp": data["submitted_ms"], + "tx_hash": tx_hash, + } + + activity, activity_created = Activity.objects.update_or_create( + action_result=data, type="Register", defaults=defaults + ) + return True + except Exception 
as e: + logger.error(f"Encountered error trying to insert activity: {e}") + return False + + +def handle_stellar_list_admin_ops(data, contract_id, timestamp, tx_hash): + try: + round_id, admins = data[0], data[1] + logger.info(f"updating admins: {admins} for round {round_id}") + round_obj = Round.objects.get(on_chain_id=round_id) # select related? + chain = Chain.objects.get(name="stellar") + + for acct in admins: + admin, _ = Account.objects.get_or_create(defaults={"chain":chain},id=acct) + contains = round_obj.admins.acontains(admin) + if not contains: + round_obj.admins.add(admin) + for admin in round_obj.admins.all(): + if not admin.id in admins: + round_obj.admins.remove(admin) + + activity = { + "signer_id": round_obj.owner.id, + "receiver_id": contract_id, + "timestamp": timestamp, + "tx_hash": tx_hash, + } + + activity, activity_created = Activity.objects.update_or_create( + type="Add_List_Admin", defaults=activity + ) + return True + except Exception as e: + logger.error(f"Failed to remove list admin, Error: {e}") + return False + # Campaign Event Indexing Methods async def handle_new_campaign(data: dict, created_at): diff --git a/lists/api.py b/lists/api.py index b906d21..639e219 100644 --- a/lists/api.py +++ b/lists/api.py @@ -42,6 +42,12 @@ class ListsListAPI(APIView, CustomSizePageNumberPagination): OpenApiParameter.QUERY, description="Filter lists by account", ), + OpenApiParameter( + "chain", + str, + OpenApiParameter.QUERY, + description="Filter lists by chain id", + ), OpenApiParameter( "admin", str, @@ -72,6 +78,17 @@ class ListsListAPI(APIView, CustomSizePageNumberPagination): def get(self, request: Request, *args, **kwargs): lists = List.objects.all().select_related("owner").prefetch_related("admins", "upvotes").annotate(registrations_count=Count('registrations')) account_id = request.query_params.get("account") + chain = request.query_params.get("chain") + if chain: + lists = lists.filter(chain__id=chain) + if account_id: + try: + account = 
Chain.objects.get(name=account_id) + lists = lists.filter(owner=account) + except Account.DoesNotExist: + return Response( + {"message": f"Account with ID {account_id} not found."}, status=404 + ) if account_id: try: account = Account.objects.get(id=account_id) diff --git a/lists/migrations/0008_list_chain.py b/lists/migrations/0008_list_chain.py new file mode 100644 index 0000000..958d8e3 --- /dev/null +++ b/lists/migrations/0008_list_chain.py @@ -0,0 +1,27 @@ +# Generated by Django 5.0.6 on 2025-09-22 12:10 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("chains", "0003_add_stellar_chain"), + ("lists", "0007_alter_list_cover_image_url_alter_list_description_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="list", + name="chain", + field=models.ForeignKey( + default=1, + help_text="Blockchain this list was created on.", + on_delete=django.db.models.deletion.CASCADE, + related_name="lists", + related_query_name="list", + to="chains.chain", + ), + ), + ] diff --git a/lists/migrations/0009_alter_list_on_chain_id_and_more.py b/lists/migrations/0009_alter_list_on_chain_id_and_more.py new file mode 100644 index 0000000..afb6a58 --- /dev/null +++ b/lists/migrations/0009_alter_list_on_chain_id_and_more.py @@ -0,0 +1,28 @@ +# Generated by Django 5.0.6 on 2025-09-23 11:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("accounts", "0006_alter_account_near_social_profile_data"), + ("chains", "0003_add_stellar_chain"), + ("lists", "0008_list_chain"), + ] + + operations = [ + migrations.AlterField( + model_name="list", + name="on_chain_id", + field=models.IntegerField( + help_text="List ID in contract", verbose_name="contract list ID" + ), + ), + migrations.AddConstraint( + model_name="list", + constraint=models.UniqueConstraint( + fields=("on_chain_id", "chain"), 
name="unique_on_chain_id_per_chain" + ), + ), + ] diff --git a/lists/models.py b/lists/models.py index 9754f93..639d978 100644 --- a/lists/models.py +++ b/lists/models.py @@ -2,6 +2,7 @@ from django.utils.translation import gettext_lazy as _ from accounts.models import Account +from chains.models import Chain class ListRegistrationStatus(models.TextChoices): @@ -12,6 +13,8 @@ class ListRegistrationStatus(models.TextChoices): BLACKLISTED = "Blacklisted", "Blacklisted" + + class List(models.Model): id = models.AutoField( _("list id"), @@ -21,9 +24,16 @@ class List(models.Model): on_chain_id = models.IntegerField( _("contract list ID"), null=False, - unique=True, help_text=_("List ID in contract"), ) + chain = models.ForeignKey( + Chain, + default=1, + on_delete=models.CASCADE, + related_name="lists", + related_query_name="list", + help_text=_("Blockchain this list was created on."), + ) owner = models.ForeignKey( Account, on_delete=models.CASCADE, @@ -79,7 +89,12 @@ class List(models.Model): class Meta: indexes = [ - models.Index(fields=["created_at", "updated_at"], name="idx_list_stamps") + models.Index(fields=["created_at", "updated_at"], name="idx_list_stamps"), + ] + constraints = [ + models.UniqueConstraint( + fields=["on_chain_id", "chain"], name="unique_on_chain_id_per_chain" + ) ] From 5061ac07a60f1c98a405c242af0fd1b35079cf6d Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Wed, 24 Sep 2025 09:48:44 +0100 Subject: [PATCH 04/22] add chain to serializer --- base/settings.py | 2 +- lists/serializers.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/base/settings.py b/base/settings.py index 5affa2d..5ef90b6 100644 --- a/base/settings.py +++ b/base/settings.py @@ -94,7 +94,7 @@ STELLAR_RPC_URL = ( "https://soroban-testnet.stellar.org" - if ENVIRONMENT == "local" + if ENVIRONMENT == "testnet" else "https://stellar-soroban-public.nodies.app" ) diff --git a/lists/serializers.py b/lists/serializers.py index 697e9b2..a5cb068 100644 --- 
a/lists/serializers.py +++ b/lists/serializers.py @@ -19,6 +19,7 @@ class Meta: fields = [ "id", "on_chain_id", + "chain", "owner", "admins", "name", From 504efcc81c18f2f094f566b1acb785a21928964c Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Wed, 24 Sep 2025 14:34:05 +0100 Subject: [PATCH 05/22] fetch list detail by chain and id --- api/urls.py | 2 +- lists/api.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/api/urls.py b/api/urls.py index 59e60c5..07cf61f 100644 --- a/api/urls.py +++ b/api/urls.py @@ -135,7 +135,7 @@ path("v1/donors", DonorsAPI.as_view(), name="donors_api"), # lists path("v1/lists", ListsListAPI.as_view(), name="lists_api"), - path("v1/lists/", ListDetailAPI.as_view(), name="lists_api_by_id"), + path("v1/lists//", ListDetailAPI.as_view(), name="lists_api_by_id"), path( "v1/lists//registrations", ListRegistrationsAPI.as_view(), diff --git a/lists/api.py b/lists/api.py index 639e219..631c344 100644 --- a/lists/api.py +++ b/lists/api.py @@ -116,6 +116,7 @@ class ListDetailAPI(APIView): @extend_schema( parameters=[ OpenApiParameter("list_id", int, OpenApiParameter.PATH), + OpenApiParameter("chain", int, OpenApiParameter.PATH), ], responses={ 200: OpenApiResponse( @@ -138,11 +139,12 @@ class ListDetailAPI(APIView): @method_decorator(cache_page(60 * 5)) def get(self, request: Request, *args, **kwargs): list_id = kwargs.get("list_id") + chain = kwargs.get("chain") try: - list_obj = List.objects.select_related("owner").prefetch_related("admins").get(on_chain_id=list_id) + list_obj = List.objects.select_related("owner").prefetch_related("admins").get(on_chain_id=list_id, chain=chain) except List.DoesNotExist: return Response( - {"message": f"List with onchain ID {list_id} not found."}, status=404 + {"message": f"List with onchain ID {list_id} not found on chain {chain}."}, status=404 ) serializer = ListSerializer(list_obj) return Response(serializer.data) From a4ded254e2cca34c5cd5feee3bfba77ad6ce00b3 Mon Sep 17 
Subject: [PATCH 06/22] make chain a query param for list details
2001 From: Boluwatife Popoola Date: Wed, 24 Sep 2025 19:41:11 +0100 Subject: [PATCH 07/22] filter by chain name --- lists/api.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/lists/api.py b/lists/api.py index c523848..85bfa4b 100644 --- a/lists/api.py +++ b/lists/api.py @@ -78,9 +78,8 @@ class ListsListAPI(APIView, CustomSizePageNumberPagination): def get(self, request: Request, *args, **kwargs): lists = List.objects.all().select_related("owner").prefetch_related("admins", "upvotes").annotate(registrations_count=Count('registrations')) account_id = request.query_params.get("account") - chain = request.query_params.get("chain") - if chain: - lists = lists.filter(chain__id=chain) + chain = request.query_params.get("chain", "NEAR") + lists = lists.filter(chain__name=chain) if account_id: try: account = Chain.objects.get(name=account_id) @@ -146,7 +145,7 @@ def get(self, request: Request, *args, **kwargs): list_id = kwargs.get("list_id") chain = request.query_params.get("chain") try: - list_obj = List.objects.select_related("owner").prefetch_related("admins").get(on_chain_id=list_id, chain=1 if not chain else chain) + list_obj = List.objects.select_related("owner").prefetch_related("admins").get(on_chain_id=list_id, chain__name="NEAR" if not chain else chain) except List.DoesNotExist: return Response( {"message": f"List with onchain ID {list_id} not found."}, status=404 @@ -160,6 +159,12 @@ class ListRegistrationsAPI(APIView, CustomSizePageNumberPagination): @extend_schema( parameters=[ OpenApiParameter("list_id", int, OpenApiParameter.PATH), + OpenApiParameter( + "chain", + str, + OpenApiParameter.QUERY, + description="Filter registrations by list chain: ('NEAR', 'stellar')", + ), OpenApiParameter( "status", str, @@ -201,8 +206,9 @@ class ListRegistrationsAPI(APIView, CustomSizePageNumberPagination): @method_decorator(cache_page(60 * 1)) def get(self, request: Request, *args, **kwargs): list_id = kwargs.get("list_id") + chain = 
Subject: [PATCH 08/22] fix subscription
registry init... {data}") try: - registry, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=receiverId) - owner, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=data["owner"]) + registry, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=receiverId + ) + owner, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=data["owner"] + ) nadabot_registry, created = await NadabotRegistry.objects.aupdate_or_create( account=registry, owner=owner, @@ -76,7 +91,9 @@ async def handle_new_nadabot_registry( if data.get("admins"): for admin_id in data["admins"]: - admin, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=admin_id) + admin, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=admin_id + ) await nadabot_registry.admins.aadd(admin) except Exception as e: logger.error(f"Error in registry initiialization: {e}") @@ -88,10 +105,14 @@ async def handle_registry_blacklist_action( logger.info(f"Registry blacklist action....... {data}") try: - registry, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=receiverId) + registry, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=receiverId + ) bulk_obj = [] for acct in data["accounts"]: - account, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=acct) + account, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=acct + ) bulk_obj.append( { "registry": registry, @@ -113,7 +134,9 @@ async def handle_registry_unblacklist_action( logger.info(f"Registry remove blacklisted accts....... 
{data}") try: - registry, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=receiverId) + registry, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=receiverId + ) entries = BlackList.objects.filter(account__in=data["accounts"]) await entries.adelete() except Exception as e: @@ -129,16 +152,21 @@ async def handle_new_pot( created_at: datetime, ): try: - logger.info("new pot deployment process... upsert accounts,") # Upsert accounts owner_id = ( data.get("owner") or signer_id ) # owner is optional; if not provided, owner will be transaction signer (this logic is implemented by Pot contract's "new" method) - owner, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=owner_id) - signer, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=signer_id) - receiver, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=receiver_id) + owner, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=owner_id + ) + signer, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=signer_id + ) + receiver, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=receiver_id + ) # check if pot exists pot = await Pot.objects.filter(account=receiver).afirst() @@ -150,7 +178,9 @@ async def handle_new_pot( logger.info("upsert chef") if data.get("chef"): - chef, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=data["chef"]) + chef, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=data["chef"] + ) # Create Pot object logger.info(f"creating pot with owner {owner_id}....") @@ -178,7 +208,10 @@ async def handle_new_pot( data["public_round_end_ms"] / 1000 ), "registry_provider": data["registry_provider"], - "min_matching_pool_donation_amount": data.get("min_matching_pool_donation_amount") or "0", + "min_matching_pool_donation_amount": data.get( + "min_matching_pool_donation_amount" + ) + or "0", 
"sybil_wrapper_provider": data.get("sybil_wrapper_provider"), "custom_sybil_checks": data.get("custom_sybil_checks"), "custom_min_threshold_score": data.get("custom_min_threshold_score"), @@ -203,7 +236,9 @@ async def handle_new_pot( # Add admins to the Pot if data.get("admins"): for admin_id in data["admins"]: - admin, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=admin_id) + admin, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=admin_id + ) await pot.admins.aadd(admin) defaults = { @@ -288,18 +323,20 @@ async def handle_pot_config_update( async def handle_new_pot_factory(data: dict, receiver_id: str, created_at: datetime): try: - logger.info("upserting accounts...") # Upsert accounts - owner, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, + owner, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=data["owner"], ) - protocol_fee_recipient_account, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, + protocol_fee_recipient_account, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=data["protocol_fee_recipient_account"], ) - receiver, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, + receiver, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=receiver_id, ) @@ -320,7 +357,8 @@ async def handle_new_pot_factory(data: dict, receiver_id: str, created_at: datet # Add admins to the PotFactory if data.get("admins"): for admin_id in data["admins"]: - admin, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, + admin, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=admin_id, ) await factory.admins.aadd(admin) @@ -328,30 +366,38 @@ async def handle_new_pot_factory(data: dict, receiver_id: str, created_at: datet # Add whitelisted deployers to the PotFactory if data.get("whitelisted_deployers"): for deployer_id in data["whitelisted_deployers"]: - deployer, _ = await 
Account.objects.aget_or_create(defaults={"chain_id":1},id=deployer_id) + deployer, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=deployer_id + ) await factory.whitelisted_deployers.aadd(deployer) except Exception as e: logger.error(f"Failed to handle new pot Factory, Error: {e}") -async def handle_new_list_and_reg(signer_id: str, receiver_id: str, status_obj: ExecutionOutcome, receipt: Receipt): +async def handle_new_list_and_reg( + signer_id: str, receiver_id: str, status_obj: ExecutionOutcome, receipt: Receipt +): create_data, reg_data = json.loads( - base64.b64decode(status_obj.status.get("SuccessValue")).decode( - "utf-8" - ) # TODO: RECEIVE AS A FUNCTION ARGUMENT - ) + base64.b64decode(status_obj.status.get("SuccessValue")).decode( + "utf-8" + ) # TODO: RECEIVE AS A FUNCTION ARGUMENT + ) await handle_new_list(signer_id, receiver_id, None, create_data) if reg_data: - await handle_new_list_registration(reg_data, receiver_id, signer_id, receipt, None) + await handle_new_list_registration( + reg_data, receiver_id, signer_id, receipt, None + ) pass async def handle_new_list( - signer_id: str, receiver_id: str, status_obj: ExecutionOutcome | None, data: dict | None + signer_id: str, + receiver_id: str, + status_obj: ExecutionOutcome | None, + data: dict | None, ): # receipt = block.receipts().filter(receiptId=receiptId)[0] try: - if not data: data = json.loads( base64.b64decode(status_obj.status.get("SuccessValue")).decode( @@ -361,11 +407,11 @@ async def handle_new_list( logger.info("upserting involveed accts...") - await Account.objects.aget_or_create(defaults={"chain_id":1},id=data["owner"]) + await Account.objects.aget_or_create(defaults={"chain_id": 1}, id=data["owner"]) - await Account.objects.aget_or_create(defaults={"chain_id":1},id=signer_id) + await Account.objects.aget_or_create(defaults={"chain_id": 1}, id=signer_id) - await Account.objects.aget_or_create(defaults={"chain_id":1},id=receiver_id) + await 
Account.objects.aget_or_create(defaults={"chain_id": 1}, id=receiver_id) logger.info(f"creating list..... {data}") @@ -383,7 +429,8 @@ async def handle_new_list( if data.get("admins"): for admin_id in data["admins"]: - admin_object, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, + admin_object, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=admin_id, ) await listObject.admins.aadd(admin_object) @@ -391,12 +438,13 @@ async def handle_new_list( logger.error(f"Failed to handle new list, Error: {e}") - async def handle_list_update( - signer_id: str, receiver_id: str, status_obj: ExecutionOutcome | None, data: dict | None + signer_id: str, + receiver_id: str, + status_obj: ExecutionOutcome | None, + data: dict | None, ): try: - if not data: data = json.loads( base64.b64decode(status_obj.status.get("SuccessValue")).decode( @@ -404,7 +452,6 @@ async def handle_list_update( ) # TODO: RECEIVE AS A FUNCTION ARGUMENT ) - logger.info(f"updating list from result..... {data}") listObject = await List.objects.filter(on_chain_id=data["id"]).aupdate( @@ -428,15 +475,14 @@ async def handle_list_update( logger.error(f"Failed to handle list update, Error: {e}") -async def handle_delete_list( - data: dict -): +async def handle_delete_list(data: dict): try: logger.info(f"deleting list..... 
{data}") lst = await List.objects.filter(on_chain_id=data["list_id"]).adelete() except Exception as e: logger.error(f"Failed to delete, Error: {e}") + async def handle_new_list_registration( data: dict, receiver_id: str, @@ -464,7 +510,7 @@ async def handle_new_list_registration( parent_list = await List.objects.aget(on_chain_id=reg_data[0]["list_id"]) for dt in reg_data: logger.info(f"dt: {dt}") - project_list.append({"chain_id":1, "id": dt["registrant_id"]}) + project_list.append({"chain_id": 1, "id": dt["registrant_id"]}) insert_data.append( { "id": dt["id"], @@ -485,7 +531,7 @@ async def handle_new_list_registration( await Account.objects.abulk_create( objs=[Account(**data) for data in project_list], ignore_conflicts=True ) - await Account.objects.aget_or_create(defaults={"chain_id":1},id=signer_id) + await Account.objects.aget_or_create(defaults={"chain_id": 1}, id=signer_id) logger.info("Upserted accounts/registrants(signer)") except Exception as e: logger.error(f"Encountered error trying to get create acct: {e}") @@ -526,6 +572,7 @@ async def handle_list_registration_removal( except Exception as e: logger.error(f"Encountered error trying to remove reg: {e}") + async def handle_list_registration_update( data: dict, receiver_id: str, status_obj: ExecutionOutcome ): @@ -560,7 +607,6 @@ async def handle_pot_application( created_at: datetime, ): try: - # receipt = block.receipts().filter(lambda receipt: receipt.receiptId == receiptId)[0] result = status_obj.status.get("SuccessValue") if not result: @@ -572,12 +618,14 @@ async def handle_pot_application( logger.info(f"new pot application data: {data}, {appl_data}") # Update or create the account - project, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, + project, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=appl_data["project_id"], ) # TODO: wouldn't this be the same as the project_id? 
should inspect - signer, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, + signer, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=signer_id, ) @@ -590,12 +638,13 @@ async def handle_pot_application( "status": appl_data["status"], "tx_hash": receipt.receipt_id, } - application, application_created = ( - await PotApplication.objects.aupdate_or_create( - applicant=project, - pot_id=receiver_id, - defaults=appl_defaults, - ) + ( + application, + application_created, + ) = await PotApplication.objects.aupdate_or_create( + applicant=project, + pot_id=receiver_id, + defaults=appl_defaults, ) # Create the activity object @@ -627,7 +676,6 @@ async def handle_pot_application_status_change( status_obj: ExecutionOutcome, ): try: - logger.info(f"pot application update data: {data}, {receiver_id}") # receipt = next(receipt for receipt in block.receipts() if receipt.receiptId == receiptId) @@ -638,7 +686,13 @@ async def handle_pot_application_status_change( ) # Retrieve the PotApplication object - appl = await PotApplication.objects.select_related('round', 'pot', 'project', 'applicant').filter(applicant_id=data["project_id"], pot_id=receiver_id).afirst() + appl = ( + await PotApplication.objects.select_related( + "round", "pot", "project", "applicant" + ) + .filter(applicant_id=data["project_id"], pot_id=receiver_id) + .afirst() + ) if not appl: logger.error( @@ -664,8 +718,12 @@ async def handle_pot_application_status_change( ) # Update the PotApplication object - await PotApplication.objects.select_related('round', 'pot', 'project', 'applicant').filter(applicant_id=data["project_id"], pot_id=receiver_id).aupdate( - **{"status": update_data["status"], "updated_at": updated_at} + await ( + PotApplication.objects.select_related( + "round", "pot", "project", "applicant" + ) + .filter(applicant_id=data["project_id"], pot_id=receiver_id) + .aupdate(**{"status": update_data["status"], "updated_at": updated_at}) ) 
logger.info("PotApplicationReview and PotApplication updated successfully.") @@ -677,7 +735,6 @@ async def handle_default_list_status_change( data: dict, receiver_id: str, status_obj: ExecutionOutcome ): try: - logger.info(f"update project data: {data}, {receiver_id}") result_data = json.loads( @@ -710,24 +767,19 @@ async def handle_list_upvote( data: dict, receiver_id: str, signer_id: str, receiptId: str, created_at: datetime ): try: - logger.info(f"upvote list: {data}, {receiver_id}") - acct, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, + acct, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=signer_id, ) - - up_default = { - "created_at": created_at - } + up_default = {"created_at": created_at} list_obj = await List.objects.aget(on_chain_id=data.get("list_id")) await ListUpvote.objects.aupdate_or_create( - list=list_obj, - account_id=signer_id, - defaults=up_default + list=list_obj, account_id=signer_id, defaults=up_default ) defaults = { @@ -748,30 +800,25 @@ async def handle_list_upvote( logger.error(f"Failed to upvote list, Error: {e}") - -async def handle_remove_upvote( - data: dict, receiver_id: str, signer_id: str -): +async def handle_remove_upvote(data: dict, receiver_id: str, signer_id: str): try: - logger.info(f"remove upvote from list: {data}, {receiver_id}") list_obj = await List.objects.aget(on_chain_id=data.get("list_id")) await ListUpvote.objects.filter(list=list_obj, account_id=signer_id).adelete() - logger.info( - f"Upvote removed successfully" - ) + logger.info(f"Upvote removed successfully") except Exception as e: logger.error(f"Failed to remove upvote from list, Error: {e}") async def handle_set_payouts(data: dict, receiver_id: str, receipt: Receipt): try: - logger.info(f"set payout data: {data}, {receiver_id}") payouts = data.get("payouts", []) pot = await Pot.objects.aget(account=receiver_id) - near_acct, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id="near") + near_acct, _ = 
await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id="near" + ) near_token, _ = await Token.objects.aget_or_create( account=near_acct ) # Pots only support native NEAR @@ -807,7 +854,6 @@ async def handle_transfer_payout( data: dict, receiver_id: str, receiptId: str, created_at: datetime ): try: - data = data["payout"] logger.info(f"fulfill payout data: {data}, {receiver_id}, {created_at}") payout = { @@ -837,7 +883,9 @@ async def handle_payout_challenge( data: dict, receiver_id: str, signer_id: str, receiptId: str, created_at: datetime ): try: - acct, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=signer_id) + acct, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=signer_id + ) logger.info(f"challenging payout..: {data}, {receiver_id}") payoutChallenge = { "created_at": created_at, @@ -885,12 +933,13 @@ async def handle_payout_challenge_response( async def handle_list_admin_ops(data, receiver_id, signer_id, receiptId): try: - logger.info(f"updating admin...: {data}, {receiver_id}") list_obj = await List.objects.aget(on_chain_id=data["list_id"]) for acct in data["admins"]: - admin, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=acct) + admin, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=acct + ) contains = await list_obj.admins.acontains(admin) if not contains: await list_obj.admins.aadd(admin) @@ -915,20 +964,23 @@ async def handle_list_admin_ops(data, receiver_id, signer_id, receiptId): async def handle_list_owner_change(data): try: logger.info(f"changing owner... 
...: {data}") - await List.objects.filter(id=data["list_id"]).aupdate(**{ - "owner": data["new_owner_id"] - }) + await List.objects.filter(id=data["list_id"]).aupdate( + **{"owner": data["new_owner_id"]} + ) except Exception as e: logger.error(f"Failed to change list owner, Error: {e}") + async def handle_add_nadabot_admin(data, receiverId): logger.info(f"adding admin...: {data}, {receiverId}") try: obj = await NadabotRegistry.objects.aget(account=receiverId) for acct in data["account_ids"]: - user, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=acct) + user, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=acct + ) await obj.admins.aadd(user) except Exception as e: logger.error(f"Failed to add nadabot admin, Error: {e}") @@ -939,7 +991,9 @@ async def handle_add_factory_deployers(data, receiverId): try: factory = await PotFactory.objects.aget(account=receiverId) for acct in data["whitelisted_deployers"]: - user, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=acct) + user, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=acct + ) await factory.whitelisted_deployers.aadd(user) except Exception as e: logger.error(f"Failed to add factory whitelisted deployers, Error: {e}") @@ -955,8 +1009,6 @@ async def handle_set_factory_configs(data, receiverId): logger.error(f"Failed to update factory configs, Error: {e}") - - # # TODO: Need to abstract some actions. 
# async def handle_batch_donations( # receiver_id: str, @@ -1008,33 +1060,41 @@ async def handle_new_donation( try: # insert donate contract which is the receiver id(because of activity relationship mainly) - donate_contract, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=receiver_id) + donate_contract, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=receiver_id + ) # Upsert donor account - donor, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=donation_data["donor_id"]) + donor, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=donation_data["donor_id"] + ) recipient = None referrer = None chef = None if donation_data.get("recipient_id"): # direct donations have recipient_id - recipient, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, - id=donation_data["recipient_id"] + recipient, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=donation_data["recipient_id"] ) if donation_data.get("project_id"): # pot donations have project_id - recipient, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, - id=donation_data["project_id"] + recipient, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=donation_data["project_id"] ) if donation_data.get("referrer_id"): - referrer, _ = await Account.objects.aget_or_create(defaults={"chain_id":1}, - id=donation_data["referrer_id"] + referrer, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=donation_data["referrer_id"] ) if donation_data.get("chef_id"): - chef, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=donation_data["chef_id"]) + chef, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=donation_data["chef_id"] + ) # Upsert token account ft_id = donation_data.get("ft_id") or "near" - token_acct, token_acct_created = await Account.objects.aget_or_create(defaults={"chain_id":1},id=ft_id) + token_acct, 
token_acct_created = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=ft_id + ) token_defaults = { "decimals": 24, } @@ -1065,7 +1125,6 @@ async def handle_new_donation( logger.error(f"Failed to create/get an account involved in donation: {e}") try: - total_amount = donation_data["total_amount"] logger.info(f"inserting {donation_type} donation") @@ -1199,7 +1258,6 @@ async def handle_update_default_human_threshold(data: dict, receiverId: str): logger.info(f"update threshold data... {data}") try: - reg = await NadabotRegistry.objects.filter(account=receiverId).aupdate( **{"default_human_threshold": data["default_human_threshold"]} ) @@ -1217,8 +1275,12 @@ async def handle_new_provider(data: dict, receiverId: str, signerId: str): ) try: - submitter, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=data["submitted_by"]) - contract, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=data["contract_id"]) + submitter, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=data["submitted_by"] + ) + contract, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=data["contract_id"] + ) provider_id = data["id"] @@ -1263,7 +1325,9 @@ async def handle_add_stamp(data: dict, receiverId: str, signerId: str): logger.info(f"upserting accounts involved, {data['user_id']}") - user, _ = await Account.objects.aget_or_create(defaults={"chain_id":1},id=data["user_id"]) + user, _ = await Account.objects.aget_or_create( + defaults={"chain_id": 1}, id=data["user_id"] + ) provider, _ = await Provider.objects.aget_or_create(on_chain_id=data["provider_id"]) try: @@ -1329,7 +1393,6 @@ def get_block_height() -> int: return 178243042 - def update_ledger_sequence(sequence, timestamp: datetime): BlockHeight.objects.update_or_create( id=2, @@ -1340,13 +1403,16 @@ def update_ledger_sequence(sequence, timestamp: datetime): }, ) + def get_ledger_sequence() -> int: record = 
BlockHeight.objects.filter(id=2).first() if record: return record.block_height -def update_approved_projects(event_data, chain_id="stellar", time_stamp=None, tx_hash=None): +def update_approved_projects( + event_data, chain_id="stellar", time_stamp=None, tx_hash=None +): round_id, project_ids = event_data[0], event_data[1] with transaction.atomic(): @@ -1356,8 +1422,10 @@ def update_approved_projects(event_data, chain_id="stellar", time_stamp=None, tx for ids in project_ids: project = Project.objects.get(on_chain_id=ids) round_obj.approved_projects.add(project.owner) - logger.info(f"Creating application for round: {round_id} for approved projects") - status = PotApplicationStatus['Approved'.upper()] + logger.info( + f"Creating application for round: {round_id} for approved projects" + ) + status = PotApplicationStatus["Approved".upper()] appl_defaults = { "message": "added by owner", @@ -1378,28 +1446,41 @@ def update_approved_projects(event_data, chain_id="stellar", time_stamp=None, tx logger.error(f"Error updating application for Round {round_id}: {e}") return False + def update_application(event_data, txhash, reviewer_id=None, chain_id="stellar"): if type(event_data) == list: - round_id, application_data, reviewer_id = event_data[0], event_data[1], event_data[2] + round_id, application_data, reviewer_id = ( + event_data[0], + event_data[1], + event_data[2], + ) else: - event_data = event_data['application'] - round_id, application_data, reviewer_id = event_data["round_id"], event_data, reviewer_id + event_data = event_data["application"] + round_id, application_data, reviewer_id = ( + event_data["round_id"], + event_data, + reviewer_id, + ) with transaction.atomic(): try: chain = Chain.objects.get(name=chain_id) round_obj = Round.objects.get(on_chain_id=round_id, chain=chain) - applicant = Account.objects.get(id=application_data['applicant_id'], chain=chain) + applicant = Account.objects.get( + id=application_data["applicant_id"], chain=chain + ) reviewer = 
Account.objects.get(id=reviewer_id, chain=chain) if chain_id == "NEAR": - status = PotApplicationStatus[application_data['status'].upper()] + status = PotApplicationStatus[application_data["status"].upper()] else: - status = PotApplicationStatus[application_data['status'][0].upper()] + status = PotApplicationStatus[application_data["status"][0].upper()] - submitted_at = datetime.fromtimestamp(application_data['submited_ms'] / 1000) - updated_at = datetime.fromtimestamp(application_data['updated_ms'] / 1000) + submitted_at = datetime.fromtimestamp( + application_data["submited_ms"] / 1000 + ) + updated_at = datetime.fromtimestamp(application_data["updated_ms"] / 1000) defaults = { "notes": application_data.get("review_note"), @@ -1407,9 +1488,7 @@ def update_application(event_data, txhash, reviewer_id=None, chain_id="stellar") "tx_hash": txhash, } - appl = PotApplication.objects.filter( - applicant=applicant - ).first() + appl = PotApplication.objects.filter(applicant=applicant).first() PotApplicationReview.objects.update_or_create( application_id=appl.id, @@ -1418,11 +1497,15 @@ def update_application(event_data, txhash, reviewer_id=None, chain_id="stellar") defaults=defaults, ) - project = Project.objects.get(on_chain_id=application_data.get("project_id")) + project = Project.objects.get( + on_chain_id=application_data.get("project_id") + ) if status == PotApplicationStatus.APPROVED: - # If the application is approved, add the project to the round's approved projects + # If the application is approved, add the project to the round's approved projects if not round_obj.approved_projects.filter(id=project.owner.id).exists(): - logger.info(f"Adding project {project.owner.id} to approved projects for Round {round_id}") + logger.info( + f"Adding project {project.owner.id} to approved projects for Round {round_id}" + ) round_obj.approved_projects.add(project.owner) else: round_obj.approved_projects.remove(project.owner) @@ -1449,23 +1532,22 @@ def 
get_pair_projects(pair_id: int, round_id: int, chain_id: str) -> Dict: contract_id = settings.STELLAR_CONTRACT_ID function_name = "get_pair_by_index" - parameters = [stellar_sdk.scval.to_uint128(round_id), stellar_sdk.scval.to_uint32(pair_id)] + parameters = [ + stellar_sdk.scval.to_uint128(round_id), + stellar_sdk.scval.to_uint32(pair_id), + ] public_key = "GAMFYFI7TIAPMLSAWIECFZCN52TR3NUIO74YM7ECBCPM6J743KENH367" # TODO: move to settings acct = server.load_account(public_key) pair_result = server.simulate_transaction( transaction_envelope=stellar_sdk.TransactionBuilder( source_account=acct, - ).append_invoke_contract_function_op( - contract_id, - function_name, - parameters ) + .append_invoke_contract_function_op(contract_id, function_name, parameters) .set_timeout(30) .build() ) - if pair_result.results: xdr = pair_result.results[0].xdr data = stellar_sdk.scval.to_native(xdr) @@ -1489,57 +1571,58 @@ def process_vote_event(event_data, tx_hash, chain_id="stellar"): round_id, vote_data = event_data[0], event_data[1] else: # vote_event_data = event_data['vote'] - round_id, vote_data = event_data.get("round_id"), event_data['vote'] + round_id, vote_data = event_data.get("round_id"), event_data["vote"] chain = Chain.objects.get(name=chain_id) round_obj = Round.objects.get(on_chain_id=round_id, chain=chain) - voter, _ = Account.objects.get_or_create(id=vote_data['voter'], chain=chain) - voted_at = datetime.fromtimestamp(vote_data['voted_ms'] / 1000) + voter, _ = Account.objects.get_or_create(id=vote_data["voter"], chain=chain) + voted_at = datetime.fromtimestamp(vote_data["voted_ms"] / 1000) # Create or update the Vote vote, created = Vote.objects.update_or_create( round=round_obj, voter=voter, - defaults={ - 'tx_hash': tx_hash, - 'voted_at': voted_at - } + defaults={"tx_hash": tx_hash, "voted_at": voted_at}, ) - - # Process vote pairs - for pick in vote_data['picks']: + for pick in vote_data["picks"]: if chain_id == "NEAR": - pair_id = pick['pair_id'] - project_id = 
pick['voted_project'] - + pair_id = pick["pair_id"] + project_id = pick["voted_project"] else: - pair_id = pick['pair_id'] - project_id = Project.objects.get(on_chain_id=pick['project_id']).owner.id + pair_id = pick["pair_id"] + project_id = Project.objects.get( + on_chain_id=pick["project_id"] + ).owner.id pair_data = get_pair_projects(pair_id, round_id, chain_id) logger.info(f"pair data from contract...:,{pair_data}") if pair_data: - project_id_1, project_id_2 = pair_data.get('projects') + project_id_1, project_id_2 = pair_data.get("projects") if chain_id == "stellar": - project_1 = Project.objects.get(on_chain_id=project_id_1).owner.id - project_2 = Project.objects.get(on_chain_id=project_id_2).owner.id + project_1 = Project.objects.get( + on_chain_id=project_id_1 + ).owner.id + project_2 = Project.objects.get( + on_chain_id=project_id_2 + ).owner.id else: project_1 = project_id_1 project_2 = project_id_2 - vp, created = VotePair.objects.update_or_create( vote=vote, pair_id=pair_id, - defaults={'voted_project_id': project_id} + defaults={"voted_project_id": project_id}, ) vp.projects.add(project_1) vp.projects.add(project_2) - logger.info(f"Processed vote for Round: {round_id}, Voter: {voter.id}, Project: {project_id}") + logger.info( + f"Processed vote for Round: {round_id}, Voter: {voter.id}, Project: {project_id}" + ) return True except Exception as e: logger.error(f"Error processing vote for Round: {str(e)}") @@ -1556,64 +1639,68 @@ def process_project_event(event_data, chain_id="stellar"): chain = Chain.objects.get(name=chain_id) # Create or get the owner Account - owner, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=project_data['owner']) + owner, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=project_data["owner"] + ) # Create or get the payout Account # Create the Project project, created = Project.objects.update_or_create( - on_chain_id=project_data['id'], + on_chain_id=project_data["id"], defaults={ - 'image_url': 
project_data['image_url'], - 'video_url': project_data['video_url'], - 'name': project_data['name'], - 'overview': project_data['overview'], - 'owner': owner, - 'status': ProjectStatus("NEW").name, - 'submited_ms': project_data['submited_ms'], - 'updated_ms': project_data['updated_ms'], - } + "image_url": project_data["image_url"], + "video_url": project_data["video_url"], + "name": project_data["name"], + "overview": project_data["overview"], + "owner": owner, + "status": ProjectStatus("NEW").name, + "submited_ms": project_data["submited_ms"], + "updated_ms": project_data["updated_ms"], + }, ) - for contact_data in project_data['contacts']: + for contact_data in project_data["contacts"]: contact, _ = ProjectContact.objects.get_or_create( - name=contact_data['name'], - value=contact_data['value'] + name=contact_data["name"], value=contact_data["value"] ) project.contacts.add(contact) - for contract_data in project_data['contracts']: + for contract_data in project_data["contracts"]: contract, _ = ProjectContract.objects.get_or_create( - name=contract_data['name'], - contract_address=contract_data['contract_address'] + name=contract_data["name"], + contract_address=contract_data["contract_address"], ) project.contracts.add(contract) # Create and associate ProjectRepositories - for repo_data in project_data['repositories']: + for repo_data in project_data["repositories"]: repo, _ = ProjectRepository.objects.get_or_create( - label=repo_data['label'], - url=repo_data['url'] + label=repo_data["label"], url=repo_data["url"] ) project.repositories.add(repo) - for funding_data in project_data['funding_histories']: + for funding_data in project_data["funding_histories"]: ProjectFundingHistory.objects.create( - source=funding_data['source'], - amount=funding_data['amount'], - denomination=funding_data['denomination'], # Note: There's a typo in the event data - description=funding_data['description'], - timestamp=timezone.datetime.fromtimestamp(funding_data['funded_ms'] / 
1000) + source=funding_data["source"], + amount=funding_data["amount"], + denomination=funding_data[ + "denomination" + ], # Note: There's a typo in the event data + description=funding_data["description"], + timestamp=timezone.datetime.fromtimestamp( + funding_data["funded_ms"] / 1000 + ), ) # Associate admins - for admin_address in project_data['admins']: + for admin_address in project_data["admins"]: admin, _ = Account.objects.get_or_create(id=admin_address) project.admins.add(admin) # Associate team members - for team_member_data in project_data.get('team_members', []): - team_member, _ = Account.objects.get_or_create(id=team_member_data['value']) + for team_member_data in project_data.get("team_members", []): + team_member, _ = Account.objects.get_or_create(id=team_member_data["value"]) project.team_members.add(team_member) if created: @@ -1628,86 +1715,140 @@ def process_project_event(event_data, chain_id="stellar"): return False - def create_or_update_round(event_data, contract_id, timestamp, chain_id="stellar"): try: logger.info(f"create_or_update_round: {event_data}, {contract_id}, {chain_id}") # Create Round if chain_id == "NEAR": - event_data = event_data.get('round_detail') - round_id = event_data.get('id') - owner_address = event_data.get('owner') + event_data = event_data.get("round_detail") + round_id = event_data.get("id") + owner_address = event_data.get("owner") chain = Chain.objects.get(name=chain_id) - owner, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=owner_address) - factory_contract, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=contract_id) - remaining_dist_address = event_data.get('remaining_dist_address', event_data.get('remaining_funds_redistribution_recipient')) + owner, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=owner_address + ) + factory_contract, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=contract_id + ) + remaining_dist_address = event_data.get( + 
"remaining_dist_address", + event_data.get("remaining_funds_redistribution_recipient"), + ) if remaining_dist_address: - remaining_dist_address_obj, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=remaining_dist_address) + remaining_dist_address_obj, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=remaining_dist_address + ) - remaining_dist_by = event_data.get('remaining_dist_by', event_data.get('remaining_funds_redistributed_by')) + remaining_dist_by = event_data.get( + "remaining_dist_by", event_data.get("remaining_funds_redistributed_by") + ) if remaining_dist_by: - remaining_dist_by_obj, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=remaining_dist_by) + remaining_dist_by_obj, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=remaining_dist_by + ) - if event_data.get('round_complete_ms', event_data.get('round_complete')): - round_time_stamp = datetime.fromtimestamp(event_data.get('round_complete_ms', event_data.get('round_complete')) / 1000) + if event_data.get("round_complete_ms", event_data.get("round_complete")): + round_time_stamp = datetime.fromtimestamp( + event_data.get("round_complete_ms", event_data.get("round_complete")) + / 1000 + ) else: round_time_stamp = None if chain_id == "NEAR": use_vault = True else: - use_vault = event_data.get('use_vault', False) + use_vault = event_data.get("use_vault", False) round_obj, created = Round.objects.update_or_create( on_chain_id=round_id, chain=chain, defaults={ - 'owner': owner, - 'factory_contract': factory_contract, - 'chain': chain, - 'name': event_data.get('name'), - 'description': event_data.get('description'), - 'expected_amount': event_data.get('expected_amount'), - 'application_start': datetime.fromtimestamp(event_data.get('application_start_ms') / 1000) if event_data.get('application_start_ms') else None, - 'application_end': datetime.fromtimestamp(event_data.get('application_end_ms') / 1000) if event_data.get('application_end_ms') else 
None, - 'voting_start': datetime.fromtimestamp(event_data.get('voting_start_ms') / 1000), - 'voting_end': datetime.fromtimestamp(event_data.get('voting_end_ms') / 1000), - 'use_whitelist_voting': event_data.get('use_whitelist_voting', False), - 'use_whitelist_application': event_data.get('use_whitelist_application', False), - 'application_wl_list_id': event_data.get('application_wl_list_id'), - 'voting_wl_list_id': event_data.get('voting_wl_list_id'), - 'use_vault': use_vault or False, - 'num_picks_per_voter': event_data.get('num_picks_per_voter'), - 'max_participants': event_data.get('max_participants'), - 'allow_applications': event_data.get('allow_applications'), - 'allow_remaining_dist': event_data.get('allow_remaining_dist', event_data.get('allow_remaining_funds_redistribution')), - 'compliance_end': datetime.fromtimestamp(event_data.get('compliance_end_ms') / 1000) if event_data.get('compliance_end_ms') else None, - 'compliance_period_ms': event_data.get('compliance_period_ms'), - 'compliance_req_desc': event_data.get('compliance_req_desc', event_data.get('compliance_requirement_description')), - 'cooldown_end': datetime.fromtimestamp(event_data.get('cooldown_end_ms') / 1000) if event_data.get('cooldown_end_ms') else None, - 'cooldown_period_ms': event_data.get('cooldown_period_ms'), - 'is_video_required': event_data.get('is_video_required') or event_data.get('application_requires_video', False) , - 'referrer_fee_basis_points': event_data.get('referrer_fee_basis_points'), - 'remaining_dist_address_id': remaining_dist_address, - 'remaining_dist_at_ms': datetime.fromtimestamp(event_data.get('remaining_dist_at_ms') / 1000) if event_data.get('remaining_dist_at_ms') else None, - 'remaining_dist_by_id': remaining_dist_by, - 'remaining_dist_memo': event_data.get('remaining_dist_memo', event_data.get('remaining_funds_redistribution_memo')), - 'round_complete': round_time_stamp, - 'vault_total_deposits': event_data.get('vault_total_deposits'), - 'minimum_deposit': 
event_data.get('minimum_deposit'), - 'current_vault_balance': event_data.get('current_vault_balance'), - 'deployed_at': timestamp - } + "owner": owner, + "factory_contract": factory_contract, + "chain": chain, + "name": event_data.get("name"), + "description": event_data.get("description"), + "expected_amount": event_data.get("expected_amount"), + "application_start": datetime.fromtimestamp( + event_data.get("application_start_ms") / 1000 + ) + if event_data.get("application_start_ms") + else None, + "application_end": datetime.fromtimestamp( + event_data.get("application_end_ms") / 1000 + ) + if event_data.get("application_end_ms") + else None, + "voting_start": datetime.fromtimestamp( + event_data.get("voting_start_ms") / 1000 + ), + "voting_end": datetime.fromtimestamp( + event_data.get("voting_end_ms") / 1000 + ), + "use_whitelist_voting": event_data.get("use_whitelist_voting", False), + "use_whitelist_application": event_data.get( + "use_whitelist_application", False + ), + "application_wl_list_id": event_data.get("application_wl_list_id"), + "voting_wl_list_id": event_data.get("voting_wl_list_id"), + "use_vault": use_vault or False, + "num_picks_per_voter": event_data.get("num_picks_per_voter"), + "max_participants": event_data.get("max_participants"), + "allow_applications": event_data.get("allow_applications"), + "allow_remaining_dist": event_data.get( + "allow_remaining_dist", + event_data.get("allow_remaining_funds_redistribution"), + ), + "compliance_end": datetime.fromtimestamp( + event_data.get("compliance_end_ms") / 1000 + ) + if event_data.get("compliance_end_ms") + else None, + "compliance_period_ms": event_data.get("compliance_period_ms"), + "compliance_req_desc": event_data.get( + "compliance_req_desc", + event_data.get("compliance_requirement_description"), + ), + "cooldown_end": datetime.fromtimestamp( + event_data.get("cooldown_end_ms") / 1000 + ) + if event_data.get("cooldown_end_ms") + else None, + "cooldown_period_ms": 
event_data.get("cooldown_period_ms"), + "is_video_required": event_data.get("is_video_required") + or event_data.get("application_requires_video", False), + "referrer_fee_basis_points": event_data.get( + "referrer_fee_basis_points" + ), + "remaining_dist_address_id": remaining_dist_address, + "remaining_dist_at_ms": datetime.fromtimestamp( + event_data.get("remaining_dist_at_ms") / 1000 + ) + if event_data.get("remaining_dist_at_ms") + else None, + "remaining_dist_by_id": remaining_dist_by, + "remaining_dist_memo": event_data.get( + "remaining_dist_memo", + event_data.get("remaining_funds_redistribution_memo"), + ), + "round_complete": round_time_stamp, + "vault_total_deposits": event_data.get("vault_total_deposits"), + "minimum_deposit": event_data.get("minimum_deposit"), + "current_vault_balance": event_data.get("current_vault_balance"), + "deployed_at": timestamp, + }, ) - for admin_address in event_data.get('admins', []): - admin, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=admin_address) + for admin_address in event_data.get("admins", []): + admin, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=admin_address + ) round_obj.admins.add(admin) # Create contacts for the round - for contact in event_data.get('contacts', []): + for contact in event_data.get("contacts", []): contact_obj, created = ProjectContact.objects.update_or_create( - name=contact['name'], - value=contact['value'] + name=contact["name"], value=contact["value"] ) round_obj.contacts.add(contact_obj) @@ -1722,12 +1863,13 @@ def process_application_to_round(event_data, tx_hash): try: # Process application to Round round_id, application_data = event_data[0], event_data[1] - applicant_id = application_data.get('applicant_id') - status = PotApplicationStatus[application_data['status'].upper()] - submitted_at = datetime.fromtimestamp(application_data['submited_ms'] / 1000) + applicant_id = application_data.get("applicant_id") + status = 
PotApplicationStatus[application_data["status"].upper()] + submitted_at = datetime.fromtimestamp(application_data["submited_ms"] / 1000) updated_at = ( - datetime.fromtimestamp(application_data['updated_ms'] / 1000) - if application_data['updated_ms'] else None + datetime.fromtimestamp(application_data["updated_ms"] / 1000) + if application_data["updated_ms"] + else None ) round_obj = Round.objects.get(on_chain_id=round_id) @@ -1737,46 +1879,50 @@ def process_application_to_round(event_data, tx_hash): round=round_obj, applicant=applicant, defaults={ - 'message': application_data['applicant_note'], - 'status': status, - 'submitted_at': submitted_at, - 'updated_at': updated_at, - 'tx_hash': tx_hash - } + "message": application_data["applicant_note"], + "status": status, + "submitted_at": submitted_at, + "updated_at": updated_at, + "tx_hash": tx_hash, + }, + ) + logger.info( + f"Processed application for Round: {round_id}, Applicant: {applicant_id}" ) - logger.info(f"Processed application for Round: {round_id}, Applicant: {applicant_id}") return True except Exception as e: logger.error(f"Error processing rounds applications event: {str(e)}") return False - def create_round_application(event_data, tx_hash, chain_id="stellar"): try: logger.info(f"create_round_application: {event_data}, {tx_hash}, {chain_id}") if type(event_data) == list: round_id, application_data = event_data[0], event_data[1] else: - event_data = event_data['application'] + event_data = event_data["application"] round_id, application_data = event_data["round_id"], event_data chain = Chain.objects.get(name=chain_id) - applicant, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=application_data["applicant_id"]) + applicant, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=application_data["applicant_id"] + ) round_obj = Round.objects.get(on_chain_id=round_id, chain=chain) if chain_id == "NEAR": - status = PotApplicationStatus[application_data['status'].upper()] + status = 
PotApplicationStatus[application_data["status"].upper()] else: - status = PotApplicationStatus[application_data['status'][0].upper()] + status = PotApplicationStatus[application_data["status"][0].upper()] logger.info(f"Creating application for round: {round_id}") appl_defaults = { "message": application_data["applicant_note"], - "submitted_at": datetime.fromtimestamp(application_data["submited_ms"] / 1000), + "submitted_at": datetime.fromtimestamp( + application_data["submited_ms"] / 1000 + ), "status": status, "tx_hash": tx_hash, } - PotApplication.objects.update_or_create( applicant=applicant, round=round_obj, @@ -1792,17 +1938,21 @@ def create_round_application(event_data, tx_hash, chain_id="stellar"): def process_rounds_deposit_event(event_data, tx_hash, chain_id="stellar"): try: - logger.info(f"process_rounds_deposit_event: {event_data}, {tx_hash}, {chain_id}") + logger.info( + f"process_rounds_deposit_event: {event_data}, {tx_hash}, {chain_id}" + ) # Process deposit event if type(event_data) == list: round_id, deposit_data = event_data else: - event_data = event_data['deposit'] + event_data = event_data["deposit"] round_id, deposit_data = event_data["round_id"], event_data chain = Chain.objects.get(name=chain_id) round_obj = Round.objects.get(on_chain_id=round_id, chain=chain) amount = deposit_data["total_amount"] - depositor, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=deposit_data["depositor_id"]) + depositor, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=deposit_data["depositor_id"] + ) # Create or update a RoundDeposit object deposit, created = RoundDeposit.objects.update_or_create( @@ -1810,28 +1960,35 @@ def process_rounds_deposit_event(event_data, tx_hash, chain_id="stellar"): on_chain_id=deposit_data.get("deposit_id", deposit_data.get("id")), depositor=depositor, defaults={ - 'amount': amount, - 'protocol_fee': deposit_data["protocol_fee"], - 'referrer_fee': deposit_data["referrer_fee"], - 'memo': 
deposit_data["memo"], - 'tx_hash': tx_hash, - 'deposit_at': datetime.fromtimestamp(deposit_data["deposited_at"] / 1000), - } + "amount": amount, + "protocol_fee": deposit_data["protocol_fee"], + "referrer_fee": deposit_data["referrer_fee"], + "memo": deposit_data["memo"], + "tx_hash": tx_hash, + "deposit_at": datetime.fromtimestamp( + deposit_data["deposited_at"] / 1000 + ), + }, ) - round_obj.vault_total_deposits = str(int(round_obj.vault_total_deposits or 0) + int(amount)) - round_obj.current_vault_balance = str(int(round_obj.current_vault_balance or 0) + int(deposit_data["net_amount"])) + round_obj.vault_total_deposits = str( + int(round_obj.vault_total_deposits or 0) + int(amount) + ) + round_obj.current_vault_balance = str( + int(round_obj.current_vault_balance or 0) + int(deposit_data["net_amount"]) + ) round_obj.save() round_obj.update_vault_usd_equivalent() - logger.info(f"Processed deposit for Round: {round_id}, Depositor: {depositor.id}, Amount: {amount}") + logger.info( + f"Processed deposit for Round: {round_id}, Depositor: {depositor.id}, Amount: {amount}" + ) return True except Exception as e: logger.error(f"Error processing deposits to rounds: {str(e)}") return False - def create_round_payout(event_data, tx_hash, chain_id="stellar"): try: logger.info(f"create_round_payout: {event_data}, {tx_hash}, {chain_id}") @@ -1841,10 +1998,10 @@ def create_round_payout(event_data, tx_hash, chain_id="stellar"): memo = payout_data.get("memo") chain = Chain.objects.get(name=chain_id) - token_acct, _ = Account.objects.get_or_create(defaults={"chain":chain},id=chain_id.lower()) - token, _ = Token.objects.get_or_create( - account=token_acct + token_acct, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=chain_id.lower() ) + token, _ = Token.objects.get_or_create(account=token_acct) round_obj = Round.objects.get(on_chain_id=round_id, chain=chain) payout = PotPayout( @@ -1858,7 +2015,9 @@ def create_round_payout(event_data, tx_hash, chain_id="stellar"): 
tx_hash=tx_hash, ) payout.save() - logger.info(f"Created payout for round {round_id} to {recipient_id} for amount {amount}, on chain {chain_id}") + logger.info( + f"Created payout for round {round_id} to {recipient_id} for amount {amount}, on chain {chain_id}" + ) return True except Exception as e: logger.error(f"Error creating round payout: {str(e)}") @@ -1873,7 +2032,9 @@ def update_round_payout(event_data, tx_hash, chain_id="stellar"): amount = payout_data["amount"] paid_at_ms = payout_data.get("paid_at_ms") chain = Chain.objects.get(name=chain_id) - recipient_id, _ = Account.objects.get_or_create(defaults={"chain":chain}, id=payout_data["recipient_id"]) + recipient_id, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=payout_data["recipient_id"] + ) memo = payout_data.get("memo") payout = PotPayout.objects.get(on_chain_id=payout_on_chain_id) payout.amount = amount @@ -1882,24 +2043,23 @@ def update_round_payout(event_data, tx_hash, chain_id="stellar"): payout.tx_hash = tx_hash payout.memo = memo payout.save() - logger.info(f"Updated payout {payout_on_chain_id} for recipient {recipient_id} with amount {amount}.") + logger.info( + f"Updated payout {payout_on_chain_id} for recipient {recipient_id} with amount {amount}." + ) return True except Exception as e: logger.error(f"Error updating Payout. {str(e)}") return False - - def handle_stellar_list(data, contract_id, timestamp, chain_id="stellar"): # receipt = block.receipts().filter(receiptId=receiptId)[0] try: logger.info("upserting involveed accts...") - owner_address = data.get('owner') + owner_address = data.get("owner") chain = Chain.objects.get(name=chain_id) - Account.objects.get_or_create(defaults={"chain":chain},id=owner_address) - + Account.objects.get_or_create(defaults={"chain": chain}, id=owner_address) logger.info(f"creating list..... 
{data}") @@ -1918,7 +2078,8 @@ def handle_stellar_list(data, contract_id, timestamp, chain_id="stellar"): if data.get("admins"): for admin_id in data["admins"]: - admin_object, _ = Account.objects.get_or_create(defaults={"chain":chain}, + admin_object, _ = Account.objects.get_or_create( + defaults={"chain": chain}, id=admin_id, ) listObject.admins.add(admin_object) @@ -1938,7 +2099,7 @@ def handle_stellar_list_update(data, contract_id, timestamp, chain_id="stellar") default_registration_status=data["default_registration_status"][0], name=data["name"], description=data["description"], - cover_image_url=data["cover_image_url"], + cover_image_url=data["cover_img_url"], admin_only_registrations=data["admin_only_registrations"], created_at=datetime.fromtimestamp(data["created_at"] / 1000), updated_at=datetime.fromtimestamp(data["updated_at"] / 1000), @@ -1949,13 +2110,17 @@ def handle_stellar_list_update(data, contract_id, timestamp, chain_id="stellar") return False -def handle_new_stellar_list_registration(data, contract_id, tx_hash, chain_id="stellar"): +def handle_new_stellar_list_registration( + data, contract_id, tx_hash, chain_id="stellar" +): logger.info(f"new Project data: {data}") # Prepare data for insertion chain = Chain.objects.get(name=chain_id) parent_list = List.objects.get(on_chain_id=data["list_id"]) try: - project = Account.objects.get_or_create({"chain":chain, "id": data["registrant_id"]}) + project = Account.objects.get_or_create( + {"chain": chain, "id": data["registrant_id"]} + ) except Exception as e: logger.error(f"Encountered error trying to get create acct: {e}") @@ -2001,11 +2166,11 @@ def handle_stellar_list_admin_ops(data, contract_id, timestamp, tx_hash): try: round_id, admins = data[0], data[1] logger.info(f"updating admins: {admins} for round {round_id}") - round_obj = Round.objects.get(on_chain_id=round_id) # select related? + round_obj = Round.objects.get(on_chain_id=round_id) # select related? 
chain = Chain.objects.get(name="stellar") for acct in admins: - admin, _ = Account.objects.get_or_create(defaults={"chain":chain},id=acct) + admin, _ = Account.objects.get_or_create(defaults={"chain": chain}, id=acct) contains = round_obj.admins.acontains(admin) if not contains: round_obj.admins.add(admin) @@ -2028,8 +2193,10 @@ def handle_stellar_list_admin_ops(data, contract_id, timestamp, tx_hash): logger.error(f"Failed to remove list admin, Error: {e}") return False + # Campaign Event Indexing Methods + async def handle_new_campaign(data: dict, created_at): """ Index a new campaign creation event. @@ -2114,7 +2281,6 @@ async def handle_new_campaign(data: dict, created_at): logger.error(f"Failed to index new campaign: {e}") - async def handle_update_campaign(data: dict): """ Index a campaign update event. @@ -2149,19 +2315,23 @@ async def handle_update_campaign(data: dict): "description": data["description"], "cover_image_url": data["cover_image_url"], "start_at": datetime.fromtimestamp(data["start_ms"] / 1000), - "end_at": datetime.fromtimestamp(data["end_ms"] / 1000) if data["end_ms"] else None, + "end_at": datetime.fromtimestamp(data["end_ms"] / 1000) + if data["end_ms"] + else None, "token": token, "target_amount": data["target_amount"], "min_amount": data["min_amount"], "max_amount": data["max_amount"], "allow_fee_avoidance": data["allow_fee_avoidance"], - } + }, ) # Fetch updated USD prices # await campaign.fetch_usd_prices_async() - logger.info(f"Successfully updated campaign: {campaign.on_chain_id}, or created? {created}") + logger.info( + f"Successfully updated campaign: {campaign.on_chain_id}, or created? 
{created}" + ) except Campaign.DoesNotExist: logger.error(f"Campaign {data['id']} not found for update") @@ -2178,7 +2348,9 @@ async def handle_delete_campaign(campaign_id: int): try: logger.info(f"Deleting campaign: {campaign_id}") - deleted_count, _ = await Campaign.objects.filter(on_chain_id=campaign_id).adelete() + deleted_count, _ = await Campaign.objects.filter( + on_chain_id=campaign_id + ).adelete() if deleted_count > 0: logger.info(f"Successfully deleted campaign: {campaign_id}") @@ -2189,7 +2361,6 @@ async def handle_delete_campaign(campaign_id: int): logger.error(f"Failed to delete campaign {campaign_id}: {e}") - async def handle_campaign_donation(data: dict, receipt_id): """ Index a campaign donation event. @@ -2253,7 +2424,7 @@ async def handle_campaign_donation(data: dict, receipt_id): on_chain_id=data["id"], campaign=campaign, donor=donor, - defaults=donation_defaults + defaults=donation_defaults, ) logger.info(f"before respective: {donation, created}") @@ -2271,11 +2442,17 @@ async def handle_campaign_donation(data: dict, receipt_id): total_amount = int(data["total_amount"]) net_amount = int(data["net_amount"]) - campaign.total_raised_amount = str(int(campaign.total_raised_amount) + total_amount) - campaign.net_raised_amount = str(int(campaign.net_raised_amount) + net_amount) + campaign.total_raised_amount = str( + int(campaign.total_raised_amount) + total_amount + ) + campaign.net_raised_amount = str( + int(campaign.net_raised_amount) + net_amount + ) await campaign.asave() - logger.info(f"Updated campaign {campaign.on_chain_id} totals: +{total_amount} total, +{net_amount} net") + logger.info( + f"Updated campaign {campaign.on_chain_id} totals: +{total_amount} total, +{net_amount} net" + ) except (ValueError, TypeError) as e: logger.error(f"Failed to update campaign totals: {e}") @@ -2285,7 +2462,6 @@ async def handle_campaign_donation(data: dict, receipt_id): logger.error(f"Failed to index campaign donation: {e}") - async def 
handle_campaign_donation_refund(data: dict, refunded_at): """ Index a campaign donation refund event. @@ -2306,33 +2482,41 @@ async def handle_campaign_donation_refund(data: dict, refunded_at): escrow_balance = data.get("escrow_balance") updated_count = await CampaignDonation.objects.filter( - on_chain_id__in=donation_ids, - campaign__on_chain_id=campaign_id + on_chain_id__in=donation_ids, campaign__on_chain_id=campaign_id ).aupdate(returned_at=refunded_at) if updated_count > 0: - logger.info(f"Successfully marked {updated_count} donations as refunded: {donation_ids}") + logger.info( + f"Successfully marked {updated_count} donations as refunded: {donation_ids}" + ) else: logger.warning(f"No donations found for refund: {donation_ids}") # Update campaign escrow balance and totals try: campaign = await Campaign.objects.aget(on_chain_id=campaign_id) - campaign.escrow_balance = str(int(campaign.escrow_balance) - int(escrow_balance)) + campaign.escrow_balance = str( + int(campaign.escrow_balance) - int(escrow_balance) + ) refunded_donations = CampaignDonation.objects.filter( - on_chain_id__in=donation_ids, - campaign__on_chain_id=campaign_id - ).values_list('total_amount', 'net_amount') + on_chain_id__in=donation_ids, campaign__on_chain_id=campaign_id + ).values_list("total_amount", "net_amount") total_refunded = sum(int(donation[0]) for donation in refunded_donations) net_refunded = sum(int(donation[1]) for donation in refunded_donations) - campaign.total_raised_amount = str(int(campaign.total_raised_amount) - total_refunded) - campaign.net_raised_amount = str(int(campaign.net_raised_amount) - net_refunded) + campaign.total_raised_amount = str( + int(campaign.total_raised_amount) - total_refunded + ) + campaign.net_raised_amount = str( + int(campaign.net_raised_amount) - net_refunded + ) await campaign.asave() - logger.info(f"Updated campaign {campaign_id}: -{total_refunded} total, -{net_refunded} net, escrow={campaign.escrow_balance}") + logger.info( + f"Updated 
campaign {campaign_id}: -{total_refunded} total, -{net_refunded} net, escrow={campaign.escrow_balance}" + ) except Campaign.DoesNotExist: logger.error(f"Campaign {campaign_id} not found for escrow balance update") except (ValueError, TypeError) as e: @@ -2354,7 +2538,9 @@ async def handle_campaign_donation_unescrowed(data: dict): """ try: - logger.info(f"Indexing campaign donation unescrow(release to recipient): {data}") + logger.info( + f"Indexing campaign donation unescrow(release to recipient): {data}" + ) donation_ids = data.get("donation_ids") updated_count = await CampaignDonation.objects.filter( @@ -2362,10 +2548,14 @@ async def handle_campaign_donation_unescrowed(data: dict): ).aupdate(escrowed=False) if updated_count > 0: - logger.info(f"Successfully marked donation {data['donation_ids']} as unescrowed") + logger.info( + f"Successfully marked donation {data['donation_ids']} as unescrowed" + ) else: - logger.warning(f"Donation {data['donation_ids']} not found to be unescrowed") + logger.warning( + f"Donation {data['donation_ids']} not found to be unescrowed" + ) except Exception as e: logger.error(f"Failed to index campaign donation unescrow: {e}") From 707b33fc692015b775572fe995e553b30ac59935 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Fri, 10 Oct 2025 23:18:17 +0100 Subject: [PATCH 09/22] make video url optional --- .gitignore | 2 + .../0010_alter_project_video_url.py | 18 ++ grantpicks/models.py | 62 +++--- indexer_app/tasks.py | 183 +++++++++++------- 4 files changed, 163 insertions(+), 102 deletions(-) create mode 100644 grantpicks/migrations/0010_alter_project_video_url.py diff --git a/.gitignore b/.gitignore index bef67d0..a89fe62 100644 --- a/.gitignore +++ b/.gitignore @@ -136,3 +136,5 @@ dmypy.json # static /static/ + +.DS_Store diff --git a/grantpicks/migrations/0010_alter_project_video_url.py b/grantpicks/migrations/0010_alter_project_video_url.py new file mode 100644 index 0000000..1954872 --- /dev/null +++ 
b/grantpicks/migrations/0010_alter_project_video_url.py @@ -0,0 +1,18 @@ +# Generated by Django 5.0.6 on 2025-10-10 22:17 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("grantpicks", "0009_round_minimum_deposit"), + ] + + operations = [ + migrations.AlterField( + model_name="project", + name="video_url", + field=models.URLField(blank=True, null=True), + ), + ] diff --git a/grantpicks/models.py b/grantpicks/models.py index 0d76a8d..f110a3e 100644 --- a/grantpicks/models.py +++ b/grantpicks/models.py @@ -26,11 +26,13 @@ class ProjectContact(models.Model): name = models.CharField(max_length=255) value = models.CharField(max_length=255) + class ProjectContract(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=255) contract_address = models.CharField(max_length=255) + class ProjectRepository(models.Model): id = models.AutoField(primary_key=True) label = models.CharField(max_length=255) @@ -44,21 +46,23 @@ class ProjectFundingHistory(models.Model): denomination = models.CharField(max_length=255) description = models.TextField() timestamp = models.DateTimeField(auto_now_add=True) - + class Project(models.Model): id = models.AutoField(primary_key=True) - on_chain_id =models.IntegerField( + on_chain_id = models.IntegerField( _("contract project id"), null=False, unique=True, help_text=_("Project id in contract"), ) image_url = models.URLField(max_length=200) - video_url = models.URLField(max_length=200) + video_url = models.URLField(max_length=200, null=True, blank=True) name = models.CharField(max_length=255) overview = models.TextField() - owner = models.ForeignKey(Account, related_name='owned_projects', on_delete=models.CASCADE) + owner = models.ForeignKey( + Account, related_name="owned_projects", on_delete=models.CASCADE + ) contacts = models.ManyToManyField( ProjectContact, related_name="contact_lists", @@ -89,8 +93,6 @@ class Project(models.Model): ) - - class 
Round(models.Model): id = models.AutoField( _("round id"), @@ -155,7 +157,7 @@ class Round(models.Model): null=False, help_text=_("Expected amount."), ) - + base_currency = models.CharField( _("base currency"), max_length=64, @@ -345,10 +347,7 @@ class Round(models.Model): ) class Meta: - unique_together = ('chain', 'on_chain_id') - - - + unique_together = ("chain", "on_chain_id") def update_vault_usd_equivalent(self): # first, see if there is a TokenHistoricalPrice within 1 day (or HISTORICAL_PRICE_QUERY_HOURS) of self.paid_at @@ -360,16 +359,17 @@ def update_vault_usd_equivalent(self): f"No USD price found for token {token.symbol} at {datetime.now()}" ) return - self.vault_total_deposits_usd = token.format_price(self.vault_total_deposits) * price_usd - self.current_vault_balance_usd = token.format_price(self.current_vault_balance) * price_usd - self.save() - logger.info( - f"Saved USD prices for round vault for round id: {self.id}" + self.vault_total_deposits_usd = ( + token.format_price(self.vault_total_deposits) * price_usd ) + self.current_vault_balance_usd = ( + token.format_price(self.current_vault_balance) * price_usd + ) + self.save() + logger.info(f"Saved USD prices for round vault for round id: {self.id}") except Exception as e: logger.error(f"Failed to calculate and stellar vault USD prices: {e}") - - + def save(self, *args, **kwargs): if self._state.adding: # If the account is being created (not updated) if not self.chain_id: @@ -456,11 +456,12 @@ class RoundDeposit(models.Model): ) class Meta: - unique_together = ('round', 'on_chain_id') + unique_together = ("round", "on_chain_id") + class Vote(models.Model): - round = models.ForeignKey(Round, on_delete=models.CASCADE, related_name='votes') - voter = models.ForeignKey(Account, on_delete=models.CASCADE, related_name='votes') + round = models.ForeignKey(Round, on_delete=models.CASCADE, related_name="votes") + voter = models.ForeignKey(Account, on_delete=models.CASCADE, related_name="votes") tx_hash = 
models.CharField( _("transaction hash"), null=True, @@ -470,25 +471,24 @@ class Vote(models.Model): voted_at = models.DateTimeField() class Meta: - unique_together = ('round', 'voter', 'voted_at') - + unique_together = ("round", "voter", "voted_at") class VotePair(models.Model): - vote = models.ForeignKey(Vote, on_delete=models.CASCADE, related_name='pairs') + vote = models.ForeignKey(Vote, on_delete=models.CASCADE, related_name="pairs") pair_id = models.PositiveIntegerField() - projects = models.ManyToManyField(Account, related_name='vote_pairs_included_in') + projects = models.ManyToManyField(Account, related_name="vote_pairs_included_in") voted_project = models.ForeignKey( Account, - on_delete=models.CASCADE, - related_name='vote_pairs_voted_for_in', + on_delete=models.CASCADE, + related_name="vote_pairs_voted_for_in", null=True, - blank=True + blank=True, ) - #old_project = models.ForeignKey(Account, on_delete=models.CASCADE, related_name='vote_pairs') + # old_project = models.ForeignKey(Account, on_delete=models.CASCADE, related_name='vote_pairs') class Meta: - unique_together = ('vote', 'pair_id') + unique_together = ("vote", "pair_id") class StellarEvent(models.Model): @@ -504,4 +504,4 @@ class StellarEvent(models.Model): blank=True, help_text=_("Transaction hash."), ) - processed = models.BooleanField(default=False) \ No newline at end of file + processed = models.BooleanField(default=False) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 064565c..900164a 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -26,11 +26,24 @@ from .logging import logger from .utils import ( - create_or_update_round, create_round_application, create_round_payout, - get_block_height, get_ledger_sequence, handle_stellar_list_admin_ops, handle_stellar_list_update, process_application_to_round, - process_project_event, process_rounds_deposit_event, process_vote_event, - save_block_height, update_application, update_approved_projects, - 
update_ledger_sequence, update_round_payout, handle_stellar_list + create_or_update_round, + create_round_application, + create_round_payout, + get_block_height, + get_ledger_sequence, + handle_stellar_list_admin_ops, + handle_stellar_list_update, + process_application_to_round, + process_project_event, + process_rounds_deposit_event, + process_vote_event, + save_block_height, + update_application, + update_approved_projects, + update_ledger_sequence, + update_round_payout, + handle_stellar_list, + handle_new_stellar_list_registration, ) CURRENT_BLOCK_HEIGHT_KEY = "current_block_height" @@ -44,12 +57,10 @@ async def indexer(from_block: int, to_block: int): logger.info(f"from block: {from_block}") lake_config = LakeConfig( - Network.TESTNET - if settings.ENVIRONMENT == "testnet" - else Network.MAINNET, + Network.TESTNET if settings.ENVIRONMENT == "testnet" else Network.MAINNET, settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, - from_block + from_block, ) _, streamer_messages_queue = streamer(lake_config) @@ -58,7 +69,9 @@ async def indexer(from_block: int, to_block: int): # Log time before fetching a new block fetch_start_time = time.time() # streamer_message is the current block - streamer_message = await asyncio.wait_for(streamer_messages_queue.get(), settings.INDEXER_STREAMER_WAIT_TIME) + streamer_message = await asyncio.wait_for( + streamer_messages_queue.get(), settings.INDEXER_STREAMER_WAIT_TIME + ) fetch_end_time = time.time() logger.info( f"Time to fetch new block: {fetch_end_time - fetch_start_time:.4f} seconds" @@ -92,9 +105,10 @@ async def indexer(from_block: int, to_block: int): f"Total time for one iteration: {iteration_end_time - fetch_start_time:.4f} seconds" ) - except asyncio.TimeoutError: - logger.warning("Stream stalled: no new blocks within timeout, restarting...") # raise Exception so sytemd can restart the worker + logger.warning( + "Stream stalled: no new blocks within timeout, restarting..." 
+ ) # raise Exception so sytemd can restart the worker raise Exception("Stream stalled: restarting...") except Exception as e: @@ -111,7 +125,7 @@ def listen_to_near_events(): # Update below with desired network & block height start_block = get_block_height() # start_block = 112682360 - logger.info(f"what's the start block, pray tell? {start_block-1}") + logger.info(f"what's the start block, pray tell? {start_block - 1}") loop.run_until_complete(indexer(start_block - 1, None)) except WorkerLostError: pass # don't log to Sentry @@ -126,7 +140,7 @@ def spot_index_near_events(start_block): asyncio.set_event_loop(loop) try: - logger.info(f"Spot index start block: {start_block-1}") + logger.info(f"Spot index start block: {start_block - 1}") loop.run_until_complete(indexer(start_block - 1, None)) except WorkerLostError: pass # don't log to Sentry @@ -282,7 +296,6 @@ def update_pot_statistics(): @shared_task def update_account_statistics(): - accounts = Account.objects.all() accounts_count = accounts.count() jobs_logger.info(f"Updating statistics for {accounts_count} accounts...") @@ -334,40 +347,43 @@ def update_account_statistics(): ) jobs_logger.info(f"Account stats for {accounts.count()} accounts updated.") + def address_to_string(obj): if isinstance(obj, Address): return obj.address raise TypeError(f"Object of type {obj.__class__.__name__} is not JSON serializable") - # Todo: Change model so thatthe event indexer saves the event and queues a task to immediately process the event, # so we don;t have a separate beat that's looping through @shared_task def stellar_event_indexer(): - server = stellar_sdk.SorobanServer( - settings.STELLAR_RPC_URL - ) - contract_ids = [settings.STELLAR_CONTRACT_ID, settings.STELLAR_PROJECTS_REGISTRY_CONTRACT, settings.STELLAR_LIST_CONTRACT] - if contract_ids == ['', '', '']: + server = stellar_sdk.SorobanServer(settings.STELLAR_RPC_URL) + contract_ids = [ + settings.STELLAR_CONTRACT_ID, + settings.STELLAR_PROJECTS_REGISTRY_CONTRACT, + 
settings.STELLAR_LIST_CONTRACT, + ] + if contract_ids == ["", "", ""]: return start_sequence = get_ledger_sequence() # start_sequence = 668843 if not start_sequence: start_sequence = 58655649 - jobs_logger.info(f"Ingesting Stellar events from ledger {start_sequence}... contracts: {contract_ids}") + jobs_logger.info( + f"Ingesting Stellar events from ledger {start_sequence}... contracts: {contract_ids}" + ) try: # Fetch events for the current sequence events = server.get_events( start_ledger=start_sequence, filters=[ EventFilter( - event_type=EventFilterType.CONTRACT, - contract_ids=contract_ids - ) - ] + event_type=EventFilterType.CONTRACT, contract_ids=contract_ids + ) + ], ) stellar_events = [] ledger_timestamp = datetime.now() @@ -376,93 +392,118 @@ def stellar_event_indexer(): event_value = event.value if event.value is not None: event_value = stellar_sdk.scval.to_native(event.value) - event_value = json.loads(json.dumps(event_value, default=address_to_string)) - stellar_events.append(StellarEvent( - ledger_sequence=event.ledger, - event_type=event_name, - contract_id=event.contract_id, - ingested_at=event.ledger_close_at, - transaction_hash=event.transaction_hash, - data=event_value - )) + event_value = json.loads( + json.dumps(event_value, default=address_to_string) + ) + stellar_events.append( + StellarEvent( + ledger_sequence=event.ledger, + event_type=event_name, + contract_id=event.contract_id, + ingested_at=event.ledger_close_at, + transaction_hash=event.transaction_hash, + data=event_value, + ) + ) if len(stellar_events) > 0: - StellarEvent.objects.bulk_create( - objs=stellar_events, - ignore_conflicts=True - ) + StellarEvent.objects.bulk_create(objs=stellar_events, ignore_conflicts=True) ledger_timestamp = event.ledger_close_at - jobs_logger.info(f"Ingested {len(stellar_events)} Stellar events from ledger {start_sequence} to {events.latest_ledger}...") + jobs_logger.info( + f"Ingested {len(stellar_events)} Stellar events from ledger {start_sequence} to 
{events.latest_ledger}..." + ) update_ledger_sequence(events.latest_ledger, ledger_timestamp) except Exception as e: jobs_logger.error(f"Error processing ledger {start_sequence}: {e}") - @shared_task def process_stellar_events(): - unprocessed_events = StellarEvent.objects.filter(processed=False).order_by('id') - jobs_logger.info(f"Processing {unprocessed_events.count()} unprocessed Stellar events...") + unprocessed_events = StellarEvent.objects.filter(processed=False).order_by("id") + jobs_logger.info( + f"Processing {unprocessed_events.count()} unprocessed Stellar events..." + ) for event in unprocessed_events: try: event_data = event.data event_name = event.event_type - if event_name == 'c_project': + if event_name == "c_project": event.processed = process_project_event(event_data) - elif event_name == 'c_round' or event_name == 'u_round': - + elif event_name == "c_round" or event_name == "u_round": # Mark event as processed - event.processed = create_or_update_round(event_data, event.contract_id, event.ingested_at) - - elif event_name == 'apply_to_round': + event.processed = create_or_update_round( + event_data, event.contract_id, event.ingested_at + ) + elif event_name == "apply_to_round": # Mark event as processed - event.processed = process_application_to_round(event_data, event.transaction_hash) - - elif event_name == 'c_app': + event.processed = process_application_to_round( + event_data, event.transaction_hash + ) - event.processed = create_round_application(event_data, event.transaction_hash) + elif event_name == "c_app": + event.processed = create_round_application( + event_data, event.transaction_hash + ) - elif event_name == 'u_app': # application review and aproval + elif event_name == "u_app": # application review and aproval event.processed = update_application(event_data, event.transaction_hash) - elif event_name == 'u_ap': - event.processed = update_approved_projects(event_data, time_stamp=event.ingested_at, tx_hash=event.transaction_hash) - - 
elif event_name == 'c_depo': - - event.processed = process_rounds_deposit_event(event_data, event.transaction_hash) + elif event_name == "u_ap": + event.processed = update_approved_projects( + event_data, + time_stamp=event.ingested_at, + tx_hash=event.transaction_hash, + ) - elif event_name == 'c_vote': + elif event_name == "c_depo": + event.processed = process_rounds_deposit_event( + event_data, event.transaction_hash + ) + elif event_name == "c_vote": event.processed = process_vote_event(event_data, event.transaction_hash) elif event_name == "c_pay": - event.processed = create_round_payout(event_data, event.transaction_hash) + event.processed = create_round_payout( + event_data, event.transaction_hash + ) elif event_name == "u_pay": - - event.processed = update_round_payout(event_data, event.transaction_hash) + event.processed = update_round_payout( + event_data, event.transaction_hash + ) elif event_name == "c_list": - event.processed = handle_stellar_list(event_data, event.contract_id, event.ingested_at) + event.processed = handle_stellar_list( + event_data, event.contract_id, event.ingested_at + ) elif event_name == "u_list": - event.processed = handle_stellar_list_update(event_data, event.contract_id, event.ingested_at) + event.processed = handle_stellar_list_update( + event_data, event.contract_id, event.ingested_at + ) elif event_name == "c_reg": - event.processed = handle_stellar_list(event_data, event.contract_id, event.transaction_hash) + event.processed = handle_new_stellar_list_registration( + event_data, event.contract_id, event.transaction_hash + ) elif event_name == "u_adm": - event.processed = handle_stellar_list_admin_ops(event_data, event.contract_id, event.ingested_at, event.transaction_hash) + event.processed = handle_stellar_list_admin_ops( + event_data, + event.contract_id, + event.ingested_at, + event.transaction_hash, + ) event.save() except Exception as e: - jobs_logger.error(f"Error processing Stellar event { event_name, event.id}: {e}") 
+ jobs_logger.error( + f"Error processing Stellar event {event_name, event.id}: {e}" + ) jobs_logger.info(f"Finished processing Stellar events.") - - @task_revoked.connect def on_task_revoked(request, terminated, signum, expired, **kwargs): logger.info( From 27cc9129a93d76aae8432174aa2cee5b89d35560 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Sat, 11 Oct 2025 01:45:57 +0100 Subject: [PATCH 10/22] list registration --- indexer_app/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 5568d61..838d199 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -2113,8 +2113,9 @@ def handle_stellar_list_update(data, contract_id, timestamp, chain_id="stellar") def handle_new_stellar_list_registration( data, contract_id, tx_hash, chain_id="stellar" ): - logger.info(f"new Project data: {data}") + logger.info(f"new list reg data: {data}") # Prepare data for insertion + data = data[2] chain = Chain.objects.get(name=chain_id) parent_list = List.objects.get(on_chain_id=data["list_id"]) try: From f2684cfedaa3c1309dfd627cd7e7f15a22f4d626 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Sat, 11 Oct 2025 11:33:38 +0100 Subject: [PATCH 11/22] filter with chain name --- indexer_app/utils.py | 38 +++++++++++++++++++++++++++----------- 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 838d199..fb75bbf 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -454,7 +454,9 @@ async def handle_list_update( logger.info(f"updating list from result..... 
{data}") - listObject = await List.objects.filter(on_chain_id=data["id"]).aupdate( + listObject = await List.objects.filter( + on_chain_id=data["id"], chain__name="NEAR" + ).aupdate( owner_id=data["owner"], default_registration_status=data["default_registration_status"], name=data["name"], @@ -478,7 +480,9 @@ async def handle_list_update( async def handle_delete_list(data: dict): try: logger.info(f"deleting list..... {data}") - lst = await List.objects.filter(on_chain_id=data["list_id"]).adelete() + lst = await List.objects.filter( + on_chain_id=data["list_id"], chain__name="NEAR" + ).adelete() except Exception as e: logger.error(f"Failed to delete, Error: {e}") @@ -507,7 +511,9 @@ async def handle_new_list_registration( # Prepare data for insertion project_list = [] insert_data = [] - parent_list = await List.objects.aget(on_chain_id=reg_data[0]["list_id"]) + parent_list = await List.objects.aget( + on_chain_id=reg_data[0]["list_id"], chain__name="NEAR" + ) for dt in reg_data: logger.info(f"dt: {dt}") project_list.append({"chain_id": 1, "id": dt["registrant_id"]}) @@ -566,7 +572,9 @@ async def handle_list_registration_removal( logger.info(f"list reg removal: {data}, {receiver_id}") try: - list_obj = await List.objects.aget(on_chain_id=data["list_id"]) + list_obj = await List.objects.aget( + on_chain_id=data["list_id"], chain__name="NEAR" + ) await list_obj.registrations.filter(id=data["registration_id"]).adelete() except Exception as e: @@ -756,7 +764,7 @@ async def handle_default_list_status_change( if result_data.get("cover_image_url"): list_update["cover_image_url"] = result_data["cover_image_url"] - await List.objects.filter(id=list_id).aupdate(**list_update) + await List.objects.filter(id=list_id, chain__name="NEAR").aupdate(**list_update) logger.info("List updated successfully.") except Exception as e: @@ -776,7 +784,9 @@ async def handle_list_upvote( up_default = {"created_at": created_at} - list_obj = await 
List.objects.aget(on_chain_id=data.get("list_id")) + list_obj = await List.objects.aget( + on_chain_id=data.get("list_id"), chain__name="NEAR" + ) await ListUpvote.objects.aupdate_or_create( list=list_obj, account_id=signer_id, defaults=up_default @@ -803,7 +813,9 @@ async def handle_list_upvote( async def handle_remove_upvote(data: dict, receiver_id: str, signer_id: str): try: logger.info(f"remove upvote from list: {data}, {receiver_id}") - list_obj = await List.objects.aget(on_chain_id=data.get("list_id")) + list_obj = await List.objects.aget( + on_chain_id=data.get("list_id"), chain__name="NEAR" + ) await ListUpvote.objects.filter(list=list_obj, account_id=signer_id).adelete() logger.info(f"Upvote removed successfully") @@ -934,7 +946,9 @@ async def handle_payout_challenge_response( async def handle_list_admin_ops(data, receiver_id, signer_id, receiptId): try: logger.info(f"updating admin...: {data}, {receiver_id}") - list_obj = await List.objects.aget(on_chain_id=data["list_id"]) + list_obj = await List.objects.aget( + on_chain_id=data["list_id"], chain__name="NEAR" + ) for acct in data["admins"]: admin, _ = await Account.objects.aget_or_create( @@ -964,7 +978,7 @@ async def handle_list_admin_ops(data, receiver_id, signer_id, receiptId): async def handle_list_owner_change(data): try: logger.info(f"changing owner... ...: {data}") - await List.objects.filter(id=data["list_id"]).aupdate( + await List.objects.filter(id=data["list_id"], chain__name="NEAR").aupdate( **{"owner": data["new_owner_id"]} ) @@ -2094,7 +2108,9 @@ def handle_stellar_list_update(data, contract_id, timestamp, chain_id="stellar") try: logger.info(f"updating list from result..... 
{data}") - listObject = List.objects.filter(on_chain_id=data["id"]).update( + listObject = List.objects.filter( + on_chain_id=data["id"], chain__name=chain_id + ).update( owner_id=data["owner"], default_registration_status=data["default_registration_status"][0], name=data["name"], @@ -2117,7 +2133,7 @@ def handle_new_stellar_list_registration( # Prepare data for insertion data = data[2] chain = Chain.objects.get(name=chain_id) - parent_list = List.objects.get(on_chain_id=data["list_id"]) + parent_list = List.objects.get(on_chain_id=data["list_id"], chain=chain) try: project = Account.objects.get_or_create( {"chain": chain, "id": data["registrant_id"]} From 18e01d2fcec2f3e6abe051b1c2339fd881f0abb3 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Mon, 13 Oct 2025 15:23:34 +0100 Subject: [PATCH 12/22] fix list update on stellar --- indexer_app/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index fb75bbf..1988915 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -2117,8 +2117,8 @@ def handle_stellar_list_update(data, contract_id, timestamp, chain_id="stellar") description=data["description"], cover_image_url=data["cover_img_url"], admin_only_registrations=data["admin_only_registrations"], - created_at=datetime.fromtimestamp(data["created_at"] / 1000), - updated_at=datetime.fromtimestamp(data["updated_at"] / 1000), + created_at=datetime.fromtimestamp(data["created_ms"] / 1000), + updated_at=datetime.fromtimestamp(data["updated_ms"] / 1000), ) return True except Exception as e: From 7f60e0f9957ca225f42cd65c9ba491e28b46c5ee Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Tue, 14 Oct 2025 09:36:14 +0100 Subject: [PATCH 13/22] extend cors --- base/settings.py | 54 +++++++++++++++++++++++++++++++++--------------- 1 file changed, 37 insertions(+), 17 deletions(-) diff --git a/base/settings.py b/base/settings.py index 5ef90b6..1259f25 100644 --- a/base/settings.py +++ 
b/base/settings.py @@ -28,7 +28,10 @@ # SECURITY WARNING: keep the secret key used in production secret! # TODO: update before prod release -SECRET_KEY = os.environ.get("PL_DJANGO_SECRET_KEY", "django-insecure-=r_v_es6w6rxv42^#kc2hca6p%=fe_*cog_5!t%19zea!enlju") +SECRET_KEY = os.environ.get( + "PL_DJANGO_SECRET_KEY", + "django-insecure-=r_v_es6w6rxv42^#kc2hca6p%=fe_*cog_5!t%19zea!enlju", +) ALLOWED_HOSTS = [ "ec2-100-27-57-47.compute-1.amazonaws.com", @@ -36,7 +39,7 @@ "127.0.0.1", "dev.potlock.io", "test-dev.potlock.io", - "api.potlock.io" + "api.potlock.io", # "alpha.potlock.io", ] @@ -62,29 +65,45 @@ RECLAIM_APP_ID = os.environ.get("PL_RECLAIM_APP_ID") RECLAIM_APP_SECRET = os.environ.get("PL_RECLAIM_APP_SECRET") RECLAIM_TWITTER_PROVIDER_ID = os.environ.get("PL_RECLAIM_TWITTER_PROVIDER_ID") -INDEXER_STREAMER_WAIT_TIME = os.environ.get("PL_INDEXER_STREAMER_WAIT_TIME", 300) # in seconds +INDEXER_STREAMER_WAIT_TIME = os.environ.get( + "PL_INDEXER_STREAMER_WAIT_TIME", 300 +) # in seconds # POTLOCK_TLA = "potlock.testnet" if ENVIRONMENT == "testnet" else "potlock.near" -POTLOCK_TLA = "potlock.testnet" if ENVIRONMENT == "testnet" else ("staging.potlock.near" if ENVIRONMENT == "dev" else "potlock.near") +POTLOCK_TLA = ( + "potlock.testnet" + if ENVIRONMENT == "testnet" + else ("staging.potlock.near" if ENVIRONMENT == "dev" else "potlock.near") +) # NADABOT_TLA = "nadabot.testnet" if ENVIRONMENT == "testnet" else "nadabot.near" -NADABOT_TLA = "nadabot.testnet" if ENVIRONMENT == "testnet" else ("staging.nadabot.near" if ENVIRONMENT == "dev" else "nadabot.near") +NADABOT_TLA = ( + "nadabot.testnet" + if ENVIRONMENT == "testnet" + else ("staging.nadabot.near" if ENVIRONMENT == "dev" else "nadabot.near") +) STELLAR_CONTRACT_ID = os.environ.get("PL_STELLAR_CONTRACT_ID", "") -STELLAR_PROJECTS_REGISTRY_CONTRACT = os.environ.get("PL_STELLAR_PROJECTS_REGISTRY_CONTRACT", "") +STELLAR_PROJECTS_REGISTRY_CONTRACT = os.environ.get( + "PL_STELLAR_PROJECTS_REGISTRY_CONTRACT", "" +) 
STELLAR_LIST_CONTRACT = os.environ.get("PL_STELLAR_LIST_CONTRACT", "") NEAR_SOCIAL_CONTRACT_ADDRESS = ( "v1.social08.testnet" if ENVIRONMENT == "testnet" else "social.near" ) -NEAR_GRANTPICKS_CONTRACT_ID = "v2.grantpicks.potlock.testnet" if ENVIRONMENT == "testnet" else ("" if ENVIRONMENT == "dev" else "") +NEAR_GRANTPICKS_CONTRACT_ID = ( + "v2.grantpicks.potlock.testnet" + if ENVIRONMENT == "testnet" + else ("" if ENVIRONMENT == "dev" else "") +) # TODO: split settigns file by enviroment if ENVIRONMENT == "testnet": - POTLOCK_PATTERN = r'\.potlock\.testnet$' - NADABOT_PATTERN = r'\.nadabot\.testnet$' + POTLOCK_PATTERN = r"\.potlock\.testnet$" + NADABOT_PATTERN = r"\.nadabot\.testnet$" elif ENVIRONMENT == "dev": - POTLOCK_PATTERN = r'\.staging\.potlock\.near$' - NADABOT_PATTERN = r'\.staging\.nadabot\.near$' + POTLOCK_PATTERN = r"\.staging\.potlock\.near$" + NADABOT_PATTERN = r"\.staging\.nadabot\.near$" else: # mainnet/prod - POTLOCK_PATTERN = r'(? Date: Tue, 14 Oct 2025 09:36:58 +0100 Subject: [PATCH 14/22] extend cors --- base/settings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/base/settings.py b/base/settings.py index 1259f25..0b928fd 100644 --- a/base/settings.py +++ b/base/settings.py @@ -237,6 +237,8 @@ "https://app.potlock.app", "https://app.potlock.org", "http://alpha.potlock.org", + "https://alpha.potlock.xyz", + "https://alpha.potlock.app", ] else: CORS_ALLOWED_ORIGINS = [ From 3f2ba377118fb19b82a77e02b0a6de655f1b02c3 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Wed, 22 Oct 2025 17:12:45 +0100 Subject: [PATCH 15/22] fix get or create --- indexer_app/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 1988915..dde3122 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -2136,7 +2136,7 @@ def handle_new_stellar_list_registration( parent_list = List.objects.get(on_chain_id=data["list_id"], chain=chain) try: project = Account.objects.get_or_create( - 
{"chain": chain, "id": data["registrant_id"]} + defaults={"chain": chain}, id=data["registrant_id"] ) except Exception as e: logger.error(f"Encountered error trying to get create acct: {e}") From 8bcab7a5c7473dd0c043d197075fac2ff5706413 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Thu, 23 Oct 2025 00:48:56 +0100 Subject: [PATCH 16/22] add onchain id to list reg --- indexer_app/utils.py | 4 ++-- .../0010_listregistration_on_chain_id.py | 22 +++++++++++++++++++ lists/models.py | 7 ++++-- 3 files changed, 29 insertions(+), 4 deletions(-) create mode 100644 lists/migrations/0010_listregistration_on_chain_id.py diff --git a/indexer_app/utils.py b/indexer_app/utils.py index dde3122..f0c917e 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -519,7 +519,7 @@ async def handle_new_list_registration( project_list.append({"chain_id": 1, "id": dt["registrant_id"]}) insert_data.append( { - "id": dt["id"], + "on_chain_id": data["id"], "registrant_id": dt["registrant_id"], "list_id": parent_list.id, "status": dt["status"], @@ -2146,7 +2146,7 @@ def handle_new_stellar_list_registration( try: _ = ListRegistration.objects.create( **{ - "id": data["id"], + "on_chain_id": data["id"], "registrant_id": data["registrant_id"], "list_id": parent_list.id, "status": data["status"], diff --git a/lists/migrations/0010_listregistration_on_chain_id.py b/lists/migrations/0010_listregistration_on_chain_id.py new file mode 100644 index 0000000..cb68b7f --- /dev/null +++ b/lists/migrations/0010_listregistration_on_chain_id.py @@ -0,0 +1,22 @@ +# Generated by Django 5.0.6 on 2025-10-22 23:48 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("lists", "0009_alter_list_on_chain_id_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="listregistration", + name="on_chain_id", + field=models.IntegerField( + help_text="list registration id in contract", + null=True, + verbose_name="list registration id on 
chain", + ), + ), + ] diff --git a/lists/models.py b/lists/models.py index 639d978..fef79cc 100644 --- a/lists/models.py +++ b/lists/models.py @@ -13,8 +13,6 @@ class ListRegistrationStatus(models.TextChoices): BLACKLISTED = "Blacklisted", "Blacklisted" - - class List(models.Model): id = models.AutoField( _("list id"), @@ -136,6 +134,11 @@ class ListRegistration(models.Model): primary_key=True, help_text=_("Registration id."), ) + on_chain_id = models.IntegerField( + _("list registration id on chain"), + null=True, + help_text=_("list registration id in contract"), + ) list = models.ForeignKey( List, on_delete=models.CASCADE, From 9f9f2da810ff6b9afdb846b61073cbede1247f88 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Fri, 24 Oct 2025 02:47:52 +0100 Subject: [PATCH 17/22] replace public address --- indexer_app/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index f0c917e..9a3548b 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -1550,7 +1550,7 @@ def get_pair_projects(pair_id: int, round_id: int, chain_id: str) -> Dict: stellar_sdk.scval.to_uint128(round_id), stellar_sdk.scval.to_uint32(pair_id), ] - public_key = "GAMFYFI7TIAPMLSAWIECFZCN52TR3NUIO74YM7ECBCPM6J743KENH367" # TODO: move to settings + public_key = "GAA3KC7HAHPZ2OGSAV5WBOFCJ3NSSPHKCYZAEI36DQJP2EB2FCGKSEFB" # TODO: move to settings acct = server.load_account(public_key) pair_result = server.simulate_transaction( From 98e8df1983dd6f6d20d004a6da19afcad54d92c5 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Sun, 26 Oct 2025 04:16:59 +0100 Subject: [PATCH 18/22] add list reg update --- indexer_app/tasks.py | 5 +++++ indexer_app/utils.py | 22 ++++++++++++++++++++-- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 900164a..69fa656 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -44,6 +44,7 @@ update_round_payout, 
handle_stellar_list, handle_new_stellar_list_registration, + update_list_registrations, ) CURRENT_BLOCK_HEIGHT_KEY = "current_block_height" @@ -487,6 +488,10 @@ def process_stellar_events(): event.processed = handle_new_stellar_list_registration( event_data, event.contract_id, event.transaction_hash ) + elif event_name == "u_reg": + event.processed = update_list_registrations( + event_data, event.contract_id, event.transaction_hash + ) elif event_name == "u_adm": event.processed = handle_stellar_list_admin_ops( event_data, diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 9a3548b..8c6534a 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -2133,7 +2133,6 @@ def handle_new_stellar_list_registration( # Prepare data for insertion data = data[2] chain = Chain.objects.get(name=chain_id) - parent_list = List.objects.get(on_chain_id=data["list_id"], chain=chain) try: project = Account.objects.get_or_create( defaults={"chain": chain}, id=data["registrant_id"] @@ -2148,7 +2147,7 @@ def handle_new_stellar_list_registration( **{ "on_chain_id": data["id"], "registrant_id": data["registrant_id"], - "list_id": parent_list.id, + "list_id": data["list_id"], "status": data["status"], "submitted_at": datetime.fromtimestamp(data["submitted_ms"] / 1000), "updated_at": datetime.fromtimestamp(data["updated_ms"] / 1000), @@ -2179,6 +2178,25 @@ def handle_new_stellar_list_registration( return False +def update_list_registrations(data, contract_id, chain_id="stellar"): + data = data[2] + + regUpdate = { + "status": data["status"][0], + "admin_notes": data["admin_notes"], + "updated_at": datetime.fromtimestamp(data["updated_ms"] / 1000), + } + + try: + # Perform the update + list = List.objects.get(on_chain_id=data["list_id"], chain__name=chain_id) + ListRegistration.objects.filter(on_chain_id=data["id"], list=list).update( + **regUpdate + ) + except Exception as e: + logger.error(f"Encountered error trying to update ListRegistration: {e}") + + def 
handle_stellar_list_admin_ops(data, contract_id, timestamp, tx_hash): try: round_id, admins = data[0], data[1] From bc6a3499517c9a5869400d2c4c8e9276b355d27c Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Sun, 26 Oct 2025 13:32:58 +0100 Subject: [PATCH 19/22] add model constraint --- indexer_app/utils.py | 4 ++++ ...istregistration_unique_on_chain_id_list.py | 20 +++++++++++++++++++ lists/models.py | 5 +++++ 3 files changed, 29 insertions(+) create mode 100644 lists/migrations/0011_listregistration_unique_on_chain_id_list.py diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 8c6534a..cab0f44 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -2187,6 +2187,10 @@ def update_list_registrations(data, contract_id, chain_id="stellar"): "updated_at": datetime.fromtimestamp(data["updated_ms"] / 1000), } + logger.info( + f"updating ListRegistration with data: {data} and updatdata: {regUpdate}" + ) + try: # Perform the update list = List.objects.get(on_chain_id=data["list_id"], chain__name=chain_id) diff --git a/lists/migrations/0011_listregistration_unique_on_chain_id_list.py b/lists/migrations/0011_listregistration_unique_on_chain_id_list.py new file mode 100644 index 0000000..f2da1bf --- /dev/null +++ b/lists/migrations/0011_listregistration_unique_on_chain_id_list.py @@ -0,0 +1,20 @@ +# Generated by Django 5.0.6 on 2025-10-26 12:29 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("accounts", "0006_alter_account_near_social_profile_data"), + ("lists", "0010_listregistration_on_chain_id"), + ] + + operations = [ + migrations.AddConstraint( + model_name="listregistration", + constraint=models.UniqueConstraint( + fields=("on_chain_id", "list"), name="unique_on_chain_id_list" + ), + ), + ] diff --git a/lists/models.py b/lists/models.py index fef79cc..fa01893 100644 --- a/lists/models.py +++ b/lists/models.py @@ -203,3 +203,8 @@ class Meta: indexes = [models.Index(fields=["id", 
"status"], name="idx_list_id_status")] unique_together = (("list", "registrant"),) + constraints = [ + models.UniqueConstraint( + fields=["on_chain_id", "list"], name="unique_on_chain_id_list" + ) + ] From 23a1b0266966446844b0968d22d3df2115af5259 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Sun, 26 Oct 2025 14:12:34 +0100 Subject: [PATCH 20/22] addd logging --- indexer_app/utils.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index cab0f44..588768c 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -2191,14 +2191,20 @@ def update_list_registrations(data, contract_id, chain_id="stellar"): f"updating ListRegistration with data: {data} and updatdata: {regUpdate}" ) + logger.debug( + f"checkout list query: {List.objects.filter(on_chain_id=data['list_id'], chain__name=chain_id)}, chain: {chain_id}, data: {data['list_id']}" + ) + try: # Perform the update list = List.objects.get(on_chain_id=data["list_id"], chain__name=chain_id) ListRegistration.objects.filter(on_chain_id=data["id"], list=list).update( **regUpdate ) + return True except Exception as e: logger.error(f"Encountered error trying to update ListRegistration: {e}") + return False def handle_stellar_list_admin_ops(data, contract_id, timestamp, tx_hash): From 114a7cc4fc1e90a46dfe40554820fe736db620bd Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Sun, 26 Oct 2025 15:09:22 +0100 Subject: [PATCH 21/22] addd logging --- indexer_app/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 588768c..28e7da4 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -2191,7 +2191,7 @@ def update_list_registrations(data, contract_id, chain_id="stellar"): f"updating ListRegistration with data: {data} and updatdata: {regUpdate}" ) - logger.debug( + logger.info( f"checkout list query: {List.objects.filter(on_chain_id=data['list_id'], chain__name=chain_id)}, chain: 
{chain_id}, data: {data['list_id']}" ) From 0feae39fce70566c46cb33a0720eedb6196bb608 Mon Sep 17 00:00:00 2001 From: Boluwatife Popoola Date: Sun, 26 Oct 2025 17:25:13 +0100 Subject: [PATCH 22/22] fix list reg updte --- indexer_app/tasks.py | 2 +- indexer_app/utils.py | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 69fa656..dc6ac49 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -490,7 +490,7 @@ def process_stellar_events(): ) elif event_name == "u_reg": event.processed = update_list_registrations( - event_data, event.contract_id, event.transaction_hash + event_data, event.contract_id ) elif event_name == "u_adm": event.processed = handle_stellar_list_admin_ops( diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 28e7da4..265eb58 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -2191,10 +2191,6 @@ def update_list_registrations(data, contract_id, chain_id="stellar"): f"updating ListRegistration with data: {data} and updatdata: {regUpdate}" ) - logger.info( - f"checkout list query: {List.objects.filter(on_chain_id=data['list_id'], chain__name=chain_id)}, chain: {chain_id}, data: {data['list_id']}" - ) - try: # Perform the update list = List.objects.get(on_chain_id=data["list_id"], chain__name=chain_id)